logstash-output-charrington 0.1.1 → 0.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
-   metadata.gz: bc7913c9e276dc1f40fa722cb271f21d0e19ab3270b7fbd854cc7057c6a50bc3
-   data.tar.gz: 4be9c45b7e08e2ef3ab84b07109096305cfcc33a84b3cc9625292d8e572bf86b
+   metadata.gz: d8166adfa6e084d6e0fd56921edbc8b71dae22e6e5e282c86f73753692f14dbf
+   data.tar.gz: 5b0286ebc0540a5152a11ffacd441d465c6cc44b7c67e66a4b08f5c825588dca
  SHA512:
-   metadata.gz: 13ed00f23566b13bd75c3935154d0c57336fe0be4e5ea1f80a0a529b068468159863fcd564e4f0ce730645d2a8ffe49d467f86fa6c0addd8b7fa8ac8b525e45a
-   data.tar.gz: 242d0f1f8174e627f45d32924f7864d78969cb92f2f20141b9311a02e5d1cb329c9c4b13b62b79c60bb252df0df3fdb0d4eeed492609ddc0ba2e00d4a1176739
+   metadata.gz: b977663c714b04890525419c98773e80f2e55993ff3cfc0e50ca3dee6d2b06e81a948a39d1e0004ba156ed219482592ddb0f604514310a95fd6e9ac66312db5d
+   data.tar.gz: 32a8eee42bf6a88c3c3a244805546edc2a5cbe67626bd5b12450c2c963b1967f7aea2821cfbd068478a71a2364f4ca24e1fc5759c76f7ebb6bdffb64c5011e43
lib/logstash/outputs/charrington.rb CHANGED
@@ -7,33 +7,40 @@ require 'java'
  require 'logstash-output-charrington_jars'
  require 'json'
  require 'bigdecimal'
+ require 'pry'
+ require File.join(File.dirname(__FILE__), "charrington/process")
+ require File.join(File.dirname(__FILE__), "charrington/transform")
+ require File.join(File.dirname(__FILE__), "charrington/insert")

  # Write events to a SQL engine, using JDBC.
- #
- # It is upto the user of the plugin to correctly configure the plugin. This
- # includes correctly crafting the SQL statement, and matching the number of
- # parameters correctly.
- #
+ # It is upto the user of the plugin to correctly configure the plugin.
+
+ # This class is responsible for setting things up, creating the connection,
+ # and handling retries. Charrington::Insert is where the insert
+ # is attempted. If that fails, it will try to either
+ # create a table via Charrington::CreateTable
+ # or alter an existing one via Charrington::AlterTable
+
  class LogStash::Outputs::Charrington < LogStash::Outputs::Base
    concurrency :shared

    STRFTIME_FMT = '%Y-%m-%d %T.%L'.freeze

-   RETRYABLE_SQLSTATE_CLASSES = [
-     # Classes of retryable SQLSTATE codes
-     # Not all in the class will be retryable. However, this is the best that
-     # we've got right now.
-     # If a custom state code is required, set it in retry_sql_states.
-     '08', # Connection Exception
-     '24', # Invalid Cursor State (Maybe retry-able in some circumstances)
-     '25', # Invalid Transaction State
-     '40', # Transaction Rollback
-     '53', # Insufficient Resources
-     '54', # Program Limit Exceeded (MAYBE)
-     '55', # Object Not In Prerequisite State
-     '57', # Operator Intervention
-     '58', # System Error
-   ].freeze
+   # RETRYABLE_SQLSTATE_CLASSES = [
+   #   # Classes of retryable SQLSTATE codes
+   #   # Not all in the class will be retryable. However, this is the best that
+   #   # we've got right now.
+   #   # If a custom state code is required, set it in retry_sql_states.
+   #   '08', # Connection Exception
+   #   '24', # Invalid Cursor State (Maybe retry-able in some circumstances)
+   #   '25', # Invalid Transaction State
+   #   '40', # Transaction Rollback
+   #   '53', # Insufficient Resources
+   #   '54', # Program Limit Exceeded (MAYBE)
+   #   '55', # Object Not In Prerequisite State
+   #   '57', # Operator Intervention
+   #   '58', # System Error
+   # ].freeze

    config_name 'charrington'

@@ -56,26 +63,12 @@ class LogStash::Outputs::Charrington < LogStash::Outputs::Base
    # jdbc password - optional, maybe in the connection string
    config :password, validate: :string, required: false

-   # [ "insert into table (message) values(?)", "%{message}" ]
-   config :statement, validate: :array, required: true
-
-   # If this is an unsafe statement, use event.sprintf
-   # This also has potential performance penalties due to having to create a
-   # new statement for each event, rather than adding to the batch and issuing
-   # multiple inserts in 1 go
-   config :unsafe_statement, validate: :boolean, default: false
-
    # Number of connections in the pool to maintain
    config :max_pool_size, validate: :number, default: 5

    # Connection timeout
    config :connection_timeout, validate: :number, default: 10000

-   # We buffer a certain number of events before flushing that out to SQL.
-   # This setting controls how many events will be buffered before sending a
-   # batch of events.
-   config :flush_size, validate: :number, default: 1000
-
    # Set initial interval in seconds between retries. Doubled on each retry up to `retry_max_interval`
    config :retry_initial_interval, validate: :number, default: 2

@@ -94,7 +87,6 @@ class LogStash::Outputs::Charrington < LogStash::Outputs::Base
    # Maximum number of sequential failed attempts, before we stop retrying.
    # If set to < 1, then it will infinitely retry.
    # At the default values this is a little over 10 minutes
-
    config :max_flush_exceptions, validate: :number, default: 10

    config :max_repeat_exceptions, obsolete: 'This has been replaced by max_flush_exceptions - which behaves slightly differently. Please check the documentation.'
@@ -114,25 +106,23 @@ class LogStash::Outputs::Charrington < LogStash::Outputs::Base
      @logger.info('JDBC - Starting up')

      load_jar_files!
-
      @stopping = Concurrent::AtomicBoolean.new(false)

-     @logger.warn('JDBC - Flush size is set to > 1000') if @flush_size > 1000
-
-     if @statement.empty?
-       @logger.error('JDBC - No statement provided. Configuration error.')
-     end
-
-     if !@unsafe_statement && @statement.length < 2
-       @logger.error("JDBC - Statement has no parameters. No events will be inserted into SQL as you're not passing any event data. Likely configuration error.")
-     end
-
      setup_and_test_pool!
    end

    def multi_receive(events)
-     events.each_slice(@flush_size) do |slice|
-       retrying_submit(slice)
+     events.each do |event|
+       connection = get_connection
+       break unless connection
+
+       opts = { connection: connection,
+                schema: @schema,
+                max_retries: @max_flush_exceptions,
+                retry_initial_interval: @retry_initial_interval }
+
+       Charrington::Process.call(connection, event, opts)
+       connection.close unless connection.nil?
      end
    end

@@ -145,19 +135,14 @@ class LogStash::Outputs::Charrington < LogStash::Outputs::Base
    private

    def setup_and_test_pool!
-     # Setup pool
      @pool = Java::ComZaxxerHikari::HikariDataSource.new
-
-     @pool.setAutoCommit(@driver_auto_commit)
      @pool.setDriverClassName(@driver_class) if @driver_class
-
-     @pool.setJdbcUrl(@connection_string)
-
      @pool.setUsername(@username) if @username
      @pool.setPassword(@password) if @password
-
      @pool.setMaximumPoolSize(@max_pool_size)
      @pool.setConnectionTimeout(@connection_timeout)
+     @pool.setAutoCommit(@driver_auto_commit)
+     @pool.setJdbcUrl(@connection_string)

      validate_connection_timeout = (@connection_timeout / 1000) / 2

@@ -176,16 +161,15 @@ class LogStash::Outputs::Charrington < LogStash::Outputs::Base
      test_connection.close
    end

+   # Load jar from driver path
    def load_jar_files!
-     # Load jar from driver path
      unless @driver_jar_path.nil?
        raise LogStash::ConfigurationError, 'JDBC - Could not find jar file at given path. Check config.' unless File.exist? @driver_jar_path
        require @driver_jar_path
        return
      end

-     # Revert original behaviour of loading from vendor directory
-     # if no path given
+     # Revert original behaviour of loading from vendor directory if no path given
      jarpath = if ENV['LOGSTASH_HOME']
                  File.join(ENV['LOGSTASH_HOME'], '/vendor/jar/jdbc/*.jar')
                else
@@ -203,192 +187,11 @@ class LogStash::Outputs::Charrington < LogStash::Outputs::Base
      end
    end

-   def create_statement(event)
-     event = event.to_hash
-     hashed = event.delete_if {|k, _v| k.start_with?("@") || k == 'host' || k == 'path' }
-
-     columns = '(' + hashed.keys.join(', ') + ')'
-
-     value_placeholders = ('?' * hashed.length).split('')
-     values = '(' + value_placeholders.join(', ') + ')'
-
-     table_name = create_table_name(event)
-     return "INSERT INTO #{table_name} #{columns} VALUES #{values}", hashed.keys
-   end
-
-   def create_table_name(event)
-     raise TableNameNil.new("Table name is nil", event) if event.nil?
-
-     event = event.to_hash["event"].to_s.strip
-     raise TableNameNil.new("Table name is nil", event) if event.empty?
-
-     schema = @schema.empty? ? '' : "#{@schema}."
-     "#{schema}#{event.gsub(/[ \-_]+/, "_").downcase}"
-   end
-
-   def prepared_statement(keys)
-     keys.map do |key|
-       turn_into_wrapped(key)
-     end
-   end
-
-   def turn_into_wrapped(key)
-     "[#{key}]"
-   end
-
-   def submit(events)
-     connection = nil
-     statement = nil
-     events_to_retry = []
-
-     begin
-       connection = @pool.getConnection
-     rescue => e
-       log_jdbc_exception(e, true, nil)
-       # If a connection is not available, then the server has gone away
-       # We're not counting that towards our retry count.
-       return events, false
-     end
-
-     events.each do |event|
-
-       begin
-         ins, columns = create_statement(event)
-         keys_we_care_about = prepared_statement(columns)
-         statement = connection.prepareStatement(
-           ins
-         )
-         statement = add_statement_event_params(statement, event, keys_we_care_about)
-         statement.execute
-       rescue TableNameNil => e
-         @logger.error("#{e.message} event=#{e.event}")
-       rescue => e
-         @logger.error "Rescue from SQLException #{e.message}"
-         create_statement = make_create_statement(event, columns)
-         puts 'create_statement'
-         puts create_statement
-
-         statement = connection.prepareStatement(
-           create_statement
-         )
-         statement.execute
-         @logger.debug('Created new Table.')
-         events_to_retry.push(event)
-       ensure
-         statement.close unless statement.nil?
-       end
-     end
-
-     connection.close unless connection.nil?
-
-     return events_to_retry, true
-   end
-
-   def retrying_submit(actions)
-     # Initially we submit the full list of actions
-     submit_actions = actions
-     count_as_attempt = true
-
-     attempts = 1
-
-     sleep_interval = @retry_initial_interval
-     while @stopping.false? and (submit_actions and !submit_actions.empty?)
-       return if !submit_actions || submit_actions.empty? # If everything's a success we move along
-       # We retry whatever didn't succeed
-       submit_actions, count_as_attempt = submit(submit_actions)
-
-       # Everything was a success!
-       break if !submit_actions || submit_actions.empty?
-
-       if @max_flush_exceptions > 0 and count_as_attempt == true
-         attempts += 1
-
-         if attempts > @max_flush_exceptions
-           @logger.error("JDBC - max_flush_exceptions has been reached. #{submit_actions.length} events have been unable to be sent to SQL and are being dropped. See previously logged exceptions for details.")
-           break
-         end
-       end
-
-       # If we're retrying the action sleep for the recommended interval
-       # Double the interval for the next time through to achieve exponential backoff
-       Stud.stoppable_sleep(sleep_interval) { @stopping.true? }
-       sleep_interval = next_sleep_interval(sleep_interval)
-     end
-   end
-
-   def make_create_statement(event, keys_we_care_about)
-     columns = []
-
-     keys_we_care_about.each_with_index do |key, idx|
-       wrapped = turn_into_wrapped(key)
-
-       case event.get(wrapped)
-       when Time, LogStash::Timestamp
-         columns << "#{key} TIMESTAMP"
-       when Integer
-         columns << "#{key} BIGINT"
-       when BigDecimal
-         columns << "#{key} DECIMAL"
-       when Float
-         columns << "#{key} DOUBLE PRECISION"
-       when String, Array, Hash
-         columns << "#{key} VARCHAR"
-       when true, false
-         columns << "#{key} BOOLEAN"
-       end
-     end
-
-     "CREATE TABLE IF NOT EXISTS #{create_table_name(event)} (#{columns.join(', ')})"
-   end
-
-   def add_statement_event_params(statement, event, keys_we_care_about)
-     keys_we_care_about.each_with_index do |key, idx|
-       if @enable_event_as_json_keyword == true and key.is_a? String and key == @event_as_json_keyword
-         value = event.to_json
-       elsif key.is_a? String
-         value = event.get(key)
-         if value.nil? and key =~ /%\{/
-           value = event.sprintf(key)
-         end
-       else
-         value = key
-       end
-
-       case value
-       when Time
-         statement.setString(idx + 1, value.strftime(STRFTIME_FMT))
-       when LogStash::Timestamp
-         statement.setString(idx + 1, value.time.strftime(STRFTIME_FMT))
-       when Integer
-         if value > 2147483647 or value < -2147483648
-           statement.setLong(idx + 1, value)
-         else
-           statement.setInt(idx + 1, value)
-         end
-       when BigDecimal
-         statement.setBigDecimal(idx + 1, value.to_java)
-       when Float
-         statement.setFloat(idx + 1, value)
-       when String
-         statement.setString(idx + 1, value)
-       when Array, Hash
-         statement.setString(idx + 1, value.to_json)
-       when true, false
-         statement.setBoolean(idx + 1, value)
-       else
-         statement.setString(idx + 1, nil)
-       end
-     end
-
-     statement
-   end
-
-   def retry_exception?(exception, event)
-     retrying = (exception.respond_to? 'getSQLState' and (RETRYABLE_SQLSTATE_CLASSES.include?(exception.getSQLState.to_s[0,2]) or @retry_sql_states.include?(exception.getSQLState)))
-     log_jdbc_exception(exception, retrying, event)
-
-     retrying
+   def get_connection
+     connection = @pool.getConnection
+   rescue => e
+     log_jdbc_exception(e, true, nil)
+     false
    end

    def log_jdbc_exception(exception, retrying, event)
@@ -400,7 +203,7 @@ class LogStash::Outputs::Charrington < LogStash::Outputs::Base
      loop do
        # TODO reformat event output so that it only shows the fields necessary.

-       @logger.send(log_method, log_text, :exception => current_exception, :statement => @statement[0], :event => event)
+       @logger.send(log_method, log_text, :exception => current_exception, :event => event)

        if current_exception.respond_to? 'getNextException'
          current_exception = current_exception.getNextException()
@@ -411,18 +214,4 @@ class LogStash::Outputs::Charrington < LogStash::Outputs::Base
        break if current_exception == nil
      end
    end
-
-   def next_sleep_interval(current_interval)
-     doubled = current_interval * 2
-     doubled > @retry_max_interval ? @retry_max_interval : doubled
-   end
- end # class LogStash::Outputs::Charrington
-
- class TableNameNil < StandardError
-   attr_reader :event
-
-   def initialize(msg='Table name is nil', event={})
-     @event = event
-     super(msg)
-   end
- end
  end
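
For orientation, here is a minimal sketch of the per-event write path that replaces the old statement/flush_size batching. It only restates what the hunks above and the new classes below do; the sample event and its values are invented for illustration, and connection handling is condensed.

# Condensed sketch of the 0.2.0 write path (not part of the gem; event values invented).
event = { "event" => "Page View", "id" => "abc", "user" => { "id" => 7 } }

connection = @pool.getConnection              # multi_receive now takes one connection per event
opts = { connection: connection, schema: @schema,
         max_retries: @max_flush_exceptions,
         retry_initial_interval: @retry_initial_interval }

Charrington::Process.call(connection, event, opts)
#   1. Charrington::Transform.call(event)  -> drops @*/host/path/jwt keys, flattens nested hashes
#   2. Charrington::Insert.call(...)       -> prepared INSERT into "<schema>.page_view"
#      on SQLSTATE 42P01/42703 it creates or alters the table and asks for a retry
#   3. retries with a doubling sleep until max_retries is exhausted
connection.close unless connection.nil?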
lib/logstash/outputs/charrington/alter_table.rb ADDED
@@ -0,0 +1,98 @@
+ require File.join(File.dirname(__FILE__), "service")
+
+ module Charrington
+   class AlterTable
+     # This service will add columns to an existing table dynamically based on finding new keys in the JSON structure.
+     # This is potentially called from Insert when an insert fails.
+
+     include Service
+     attr_reader :connection, :event, :table_name, :columns
+     attr_accessor :column_types
+
+     Error = Class.new(StandardError)
+     AlterFailed = Class.new(Error)
+
+     def initialize(connection, event, table_name, columns)
+       @connection = connection
+       @event = event
+       @table_name = table_name
+       @columns = columns
+       @column_types = []
+     end
+
+     def call
+       set_column_types
+       alter_table
+       true
+     rescue => e
+       raise AlterFailed, e.message
+     ensure
+       @column_types.clear if @column_types.is_a? Array
+     end
+
+     private
+
+     def alter_table
+       execute("ALTER TABLE IF EXISTS #{table_name} #{columns_fragment}")
+     end
+
+     def columns_fragment
+       column_types.map do |column|
+         "ADD COLUMN IF NOT EXISTS #{column}"
+       end.join(",")
+     end
+
+     def set_column_types
+       (columns - current_table_columns).each_with_index do |key, idx|
+
+         case event[key]
+         when Time, LogStash::Timestamp
+           column_types << "#{key} TIMESTAMP"
+         when Date
+           column_types << "#{key} DATE"
+         when Integer
+           column_types << "#{key} BIGINT"
+         when BigDecimal
+           column_types << "#{key} DECIMAL"
+         when Float
+           column_types << "#{key} DOUBLE PRECISION"
+         when true, false
+           column_types << "#{key} BOOLEAN"
+         else
+           column_types << "#{key} VARCHAR"
+         end
+       end
+     end
+
+     def current_table_columns
+       sql = "SELECT * FROM #{table_name} LIMIT 1;"
+       rs = executeQuery(sql)
+       meta_data = rs.getMetaData()
+       column_count = meta_data.getColumnCount()
+
+       (1..column_count).map {|i| meta_data.getColumnName(i) }
+     end
+
+     def execute(sql)
+       stmt = connection.prepareStatement(prep_sql(sql))
+       stmt.execute()
+     rescue Java::OrgPostgresqlUtil::PSQLException => e
+       # @logger.error("#{e.message}")
+     ensure
+       stmt.close unless stmt.nil?
+     end
+
+     def executeQuery(sql)
+       stmt = connection.createStatement()
+       stmt.executeQuery(prep_sql(sql))
+     rescue Java::OrgPostgresqlUtil::PSQLException => e
+       # @logger.error("#{e.message}")
+     ensure
+       stmt.close unless stmt.nil?
+     end
+
+     def prep_sql(sql)
+       sql.gsub(/\s+/, " ").strip
+     end
+   end
+ end
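
A small sketch of what AlterTable ends up executing. The table name, its existing columns, and the event values are hypothetical; the SQL text follows alter_table/columns_fragment above, assuming `connection` is a live JDBC connection from the pool.

# Illustration only: suppose dea.page_view currently has columns (id, event)
# and a flattened event arrives with two new keys.
event   = { "id" => "abc", "event" => "page_view", "duration" => 1.5, "user_id" => 42 }
columns = event.keys

Charrington::AlterTable.call(connection, event, "dea.page_view", columns)
# set_column_types diffs the event keys against the table's current columns and
# maps Ruby types to SQL types, so alter_table executes roughly:
#   ALTER TABLE IF EXISTS dea.page_view
#     ADD COLUMN IF NOT EXISTS duration DOUBLE PRECISION,
#     ADD COLUMN IF NOT EXISTS user_id BIGINT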
lib/logstash/outputs/charrington/create_table.rb ADDED
@@ -0,0 +1,69 @@
+ require File.join(File.dirname(__FILE__), "service")
+
+ module Charrington
+   class CreateTable
+     # This service will create a table dynamically based on the JSON structure.
+     # This is potentially called from Insert when an insert fails.
+
+     include Service
+     attr_reader :connection, :event, :table_name, :columns
+     attr_accessor :column_types
+
+     Error = Class.new(StandardError)
+     CreateFailed = Class.new(Error)
+
+     def initialize(connection, event, table_name, columns)
+       @connection = connection
+       @event = event.to_hash
+       @table_name = table_name
+       @columns = columns
+       @column_types = []
+     end
+
+     def call
+       set_column_types
+       create_table
+       true
+     rescue => e
+       raise CreateFailed, e.message
+     ensure
+       @column_types.clear if @column_types.is_a? Array
+     end
+
+     private
+
+     def set_column_types
+       columns.each do |column|
+         case event[column]
+         when Time, LogStash::Timestamp
+           column_types << "#{column} TIMESTAMP"
+         when Date
+           column_types << "#{column} DATE"
+         when Integer
+           column_types << "#{column} BIGINT"
+         when BigDecimal
+           column_types << "#{column} DECIMAL"
+         when Float
+           column_types << "#{column} DOUBLE PRECISION"
+         when true, false
+           column_types << "#{column} BOOLEAN"
+         else
+           column_types << "#{column} VARCHAR"
+         end
+       end
+     end
+
+     def create_table
+       execute("CREATE TABLE IF NOT EXISTS #{table_name} (#{column_types.join(', ')})")
+     end
+
+     def execute(sql)
+       statement = connection.prepareStatement( sql.gsub(/\s+/, " ").strip )
+       statement.execute()
+     rescue Java::OrgPostgresqlUtil::PSQLException => e
+       # @logger.error("#{e.message}")
+     ensure
+       statement.close unless statement.nil?
+     end
+   end
+ end
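
And the corresponding sketch for CreateTable's type mapping. The event, schema, and table name are hypothetical, and `connection` is assumed to be a live JDBC connection; the generated SQL follows set_column_types/create_table above.

# Illustration only: String and anything unmatched fall through to VARCHAR.
event   = { "event" => "signup", "id" => "abc", "created_at" => Time.now, "score" => 9.5, "active" => true }
columns = event.keys

Charrington::CreateTable.call(connection, event, "dea.signup", columns)
# Executes roughly:
#   CREATE TABLE IF NOT EXISTS dea.signup
#     (event VARCHAR, id VARCHAR, created_at TIMESTAMP, score DOUBLE PRECISION, active BOOLEAN)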
lib/logstash/outputs/charrington/insert.rb ADDED
@@ -0,0 +1,141 @@
+ require File.join(File.dirname(__FILE__), "create_table")
+ require File.join(File.dirname(__FILE__), "alter_table")
+ require File.join(File.dirname(__FILE__), "service")
+
+ module Charrington
+   class Insert
+     # This service assumes that the data is already clean and in a flattened hash format.
+     # The Transform service should be called before calling this.
+
+     include Service
+     attr_accessor :event, :should_retry
+     attr_reader :connection, :schema, :table_name, :columns
+     attr_reader :event_as_json_keyword, :enable_event_as_json_keyword
+
+     Error = Class.new(StandardError)
+     EventNil = Class.new(Error)
+     TableNameNil = Class.new(Error)
+     InsertFailed = Class.new(Error)
+
+     def initialize(connection, event, opts = {})
+       raise EventNil, "Table name is nil" if event.nil?
+       @event = event.to_hash
+
+       event_name = event["event"].to_s.downcase.strip
+       raise TableNameNil, "Table name is nil" if event_name.empty?
+
+       @connection = connection
+       @schema = opts[:schema].empty? ? '' : "#{opts[:schema]}."
+       @table_name = "#{@schema}#{event_name.gsub(/[^a-z0-9]+/, "_")}"
+
+       @columns = event.keys
+       @should_retry = false
+       @enable_event_as_json_keyword = opts[:enable_event_as_json_keyword]
+       @event_as_json_keyword = opts[:event_as_json_keyword]
+     end
+
+     def call
+       stmt = connection.prepareStatement(insert_statement)
+       stmt = add_statement_event_params(stmt)
+       stmt.execute
+       should_retry
+     rescue Java::OrgPostgresqlUtil::PSQLException => e
+       case e.getSQLState()
+       when "42P01"
+         should_retry = Charrington::CreateTable.call(connection, event, table_name, columns)
+       when "42703"
+         should_retry = Charrington::AlterTable.call(connection, event, table_name, columns)
+       else
+         raise InsertFailed, "Charrington: Rescue from SQLException #{e.message}"
+       end
+       should_retry
+     rescue => e
+       raise InsertFailed, "Charrington: Rescue from SQLException #{e.message}"
+     ensure
+       stmt.close unless stmt.nil?
+       cleanup
+     end
+
+     private
+
+     def cleanup
+       @columns.clear if clearable(@columns)
+     end
+
+     ### Set Variables
+
+     def columns_text
+       @columns_text ||= arr_to_csv(columns)
+     end
+
+     def value_placeholders
+       ('?' * columns.length).split('')
+     end
+
+     def insert_values
+       arr_to_csv(value_placeholders)
+     end
+
+     def insert_statement
+       "INSERT INTO #{table_name} #{columns_text} VALUES #{insert_values}"
+     end
+
+     def prepared_statement
+       columns.map { |column| "[#{column}]" }
+     end
+
+     def add_statement_event_params(stmt)
+       columns.each_with_index do |key, idx|
+         pos = idx + 1
+         value = event[key]
+
+         case value
+         when Time
+           stmt.setString(pos, value.strftime(STRFTIME_FMT))
+         when LogStash::Timestamp
+           stmt.setString(pos, value.time.strftime(STRFTIME_FMT))
+         when Integer
+           if value > 2147483647 || value < -2147483648
+             stmt.setLong(pos, value)
+           else
+             stmt.setInt(pos, value)
+           end
+         when BigDecimal
+           stmt.setBigDecimal(pos, value.to_java)
+         when Float
+           stmt.setFloat(pos, value)
+         when String
+           stmt.setString(pos, value)
+         when Array, Hash
+           stmt.setString(pos, value.to_json)
+         when true, false
+           stmt.setBoolean(pos, value)
+         else
+           stmt.setString(pos, nil)
+         end
+       end
+       stmt
+     end
+
+     ### Helpers
+
+     def arr_to_csv(arr)
+       '(' + arr.join(', ') + ')'
+     end
+
+     def clearable(obj)
+       obj.is_a? Hash or obj.is_a? Array
+     end
+
+     ### SQL
+
+     def execute(connection, sql)
+       statement = connection.prepareStatement( sql.gsub(/\s+/, " ").strip )
+       statement.execute()
+     rescue Java::OrgPostgresqlUtil::PSQLException => e
+       @logger.error("#{e.message}")
+     ensure
+       statement.close unless statement.nil?
+     end
+   end
+ end
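
A sketch of Insert in action, restating the code above. The event values and schema are invented; `connection` is assumed to be a live JDBC connection, and the event is assumed to have already passed through Charrington::Transform.

# Illustration only.
event = { "event" => "Page View", "id" => "abc", "app_name" => "Web App" }
opts  = { schema: "dea" }

Charrington::Insert.call(connection, event, opts)
# Builds and executes a prepared statement along the lines of:
#   INSERT INTO dea.page_view (event, id, app_name) VALUES (?, ?, ?)
# If Postgres answers with SQLSTATE 42P01 (undefined table) it delegates to
# Charrington::CreateTable; 42703 (undefined column) goes to Charrington::AlterTable.
# Either path returns true, which tells Charrington::Process to retry the insert.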
lib/logstash/outputs/charrington/process.rb ADDED
@@ -0,0 +1,62 @@
+ require File.join(File.dirname(__FILE__), "service")
+
+ module Charrington
+   class Process
+     # This service starts the process of attempting to insert a row.
+     # It handles retries where applicable.
+
+     include Service
+     attr_reader :event, :connection, :opts, :max_retries, :schema, :retry_max_interval
+     attr_accessor :retry_interval, :should_retry
+
+     Error = Class.new(StandardError)
+     ProcessFailed = Class.new(Error)
+     EventNil = Class.new(Error)
+
+     def initialize(connection, event, opts={})
+       raise EventNil, "Event is nil" if event.nil?
+       @connection = connection
+       @event = event.to_hash
+       @opts = opts
+
+       @max_retries = opts[:max_retries] || 10
+       @retry_max_interval = opts[:retry_max_interval] || 2
+       @retry_interval = opts[:retry_initial_interval] || 2
+
+       @attempts = 1
+       @should_retry = true
+     end
+
+     def call
+       while should_retry do
+         transformed = Charrington::Transform.call(event)
+         should_retry = Charrington::Insert.call(connection, transformed, opts)
+         break if !should_retry
+
+         @attempts += 1
+         break if @attempts > max_retries
+
+         # If we're retrying the action, sleep for the recommended interval
+         # Double the interval for the next time through to achieve exponential backoff
+         sleep_interval
+       end
+     rescue => e
+       raise ProcessFailed, e.message
+     ensure
+       connection.close unless connection.nil?
+       @event.clear if clearable(@event)
+     end
+
+     private
+
+     def sleep_interval
+       sleep(retry_interval)
+       doubled = retry_interval * 2
+       retry_interval = doubled > retry_max_interval ? retry_max_interval : doubled
+     end
+
+     def clearable(obj)
+       obj.is_a? Hash or obj.is_a? Array
+     end
+   end
+ end
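
The pacing of those retries follows sleep_interval above: sleep, then double the interval, capped at retry_max_interval. A tiny arithmetic sketch with hypothetical settings (initial interval 2s, cap 30s; the plugin itself passes retry_initial_interval through multi_receive, and retry_max_interval defaults to 2 unless supplied in opts):

# Hypothetical numbers, sleep calls elided.
interval, cap = 2, 30
waits = []
6.times do
  waits << interval                     # sleep(interval) happens here in sleep_interval
  interval = [interval * 2, cap].min    # double, but never exceed the cap
end
waits  # => [2, 4, 8, 16, 30, 30]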
lib/logstash/outputs/charrington/service.rb ADDED
@@ -0,0 +1,12 @@
+ # frozen_string_literal: true
+ module Service
+   def self.included(base)
+     base.extend ClassMethods
+   end
+
+   module ClassMethods
+     def call(*args)
+       new(*args).call
+     end
+   end
+ end
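
The Service mixin is what lets every class above be invoked as Klass.call(...) rather than Klass.new(...).call. A tiny usage sketch; the Greeter class is invented purely to show the pattern:

# Hypothetical class, just to demonstrate what `include Service` provides.
class Greeter
  include Service

  def initialize(name)
    @name = name
  end

  def call
    "hello #{@name}"
  end
end

Greeter.call("winston")  # => "hello winston", i.e. Greeter.new("winston").call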
lib/logstash/outputs/charrington/transform.rb ADDED
@@ -0,0 +1,47 @@
+ require File.join(File.dirname(__FILE__), "service")
+
+ module Charrington
+   class Transform
+     include Service
+     attr_accessor :event
+     attr_reader :top_level_keys
+
+     Error = Class.new(StandardError)
+     EventNil = Class.new(Error)
+     TableNameNil = Class.new(Error)
+
+     KEY_BLACKLIST = ['host','path','jwt']
+
+     def initialize(event)
+       raise EventNil, "Event is nil" if event.nil?
+       event = event.is_a?(Hash) ? event : event.to_hash
+       @event = drop_keys(event)
+       @top_level_keys = @event.keys
+     end
+
+     def call
+       flattened = flatten_hash(event)
+       top_level_keys.each { |k| event.delete(k) }
+       flattened.each_pair { |key, val| event[key] = val }
+       event
+     end
+
+     private
+
+     def drop_keys(event)
+       event.delete_if {|k, _v| k.start_with?("@") || KEY_BLACKLIST.include?(k) }
+     end
+
+     def flatten_hash(hash)
+       hash.each_with_object({}) do |(k, v), acc|
+         if v.is_a? Hash
+           flatten_hash(v).map do |h_k, h_v|
+             acc["#{k}_#{h_k}"] = h_v
+           end
+         else
+           acc[k] = v
+         end
+       end
+     end
+   end
+ end
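
A worked example of Transform on a made-up event, following drop_keys and flatten_hash above:

# Illustration only: the raw event values are invented.
raw = {
  "@timestamp" => "2019-06-17T00:00:00Z",               # dropped: starts with "@"
  "host"       => "web-1",                              # dropped: on KEY_BLACKLIST
  "event"      => "Page View",
  "user"       => { "id" => 7, "address" => { "city" => "Reykjavik" } }
}

Charrington::Transform.call(raw)
# => { "event" => "Page View", "user_id" => 7, "user_address_city" => "Reykjavik" }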
@@ -1,6 +1,7 @@
  Gem::Specification.new do |s|
    s.name = 'logstash-output-charrington'
-   s.version = '0.1.1'
+   s.version = '0.2.0'
+
    s.licenses = ['Apache License (2.0)']
    s.summary = 'This plugin allows you to output to SQL, via JDBC'
    s.description = "This gem is a logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install 'logstash-output-charrington'. This gem is not a stand-alone program"
@@ -27,4 +28,6 @@ Gem::Specification.new do |s|
    s.add_development_dependency 'jar-dependencies'
    s.add_development_dependency 'ruby-maven', '~> 3.3'
    s.add_development_dependency 'rubocop', '0.41.2'
+   s.add_development_dependency 'logstash-input-generator'
+   s.add_development_dependency 'logstash-codec-json'
  end
@@ -1,11 +1,44 @@
- # require_relative '../charrington_spec_helper'
- #
- # describe LogStash::Outputs::Jdbc do
- #   context 'when initializing' do
- #     it 'shouldn\'t register without a config' do
- #       expect do
- #         LogStash::Plugin.lookup('output', 'charrington').new
- #       end.to raise_error(LogStash::ConfigurationError)
- #     end
- #   end
- # end
+ require_relative '../charrington_spec_helper'
+
+ describe LogStash::Outputs::Charrington do
+   describe 'when initializing' do
+     it 'shouldn\'t register without a config' do
+       expect do
+         LogStash::Plugin.lookup('output', 'charrington').new
+       end.to raise_error(LogStash::ConfigurationError)
+     end
+   end
+
+   describe 'integration tests' do
+     config <<-CONFIG
+       input {
+         generator {
+           message => '{"id": "abc", "app_name": "Web App", "event": "Hi - Dan"}'
+           codec => 'json'
+           count => 1
+         }
+       }
+
+       output {
+         charrington {
+           connection_string => 'jdbc:postgresql://localhost:5432/winston?user=postgres&password=postgres'
+           driver_jar_path => '/projects/logstash-output-charrington/vendor/postgresql-42.2.5.jar'
+           schema => 'dea'
+         }
+       }
+     CONFIG
+
+     agent do
+       puts "IT'S WORKING!!!!!"
+     end
+   end
+
+   # context 'running' do
+   #   it 'should transform some JSON' do
+   #     transformed = Charrington::Transform.call({"a" => 1})
+   #     puts transformed
+   #   end
+   # end
+ end
metadata CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: logstash-output-charrington
  version: !ruby/object:Gem::Version
-   version: 0.1.1
+   version: 0.2.0
  platform: ruby
  authors:
  - dconger
@@ -10,7 +10,7 @@ authors:
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2019-05-30 00:00:00.000000000 Z
+ date: 2019-06-17 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
    requirement: !ruby/object:Gem::Requirement
@@ -102,6 +102,34 @@ dependencies:
      - - '='
        - !ruby/object:Gem::Version
          version: 0.41.2
+ - !ruby/object:Gem::Dependency
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+   name: logstash-input-generator
+   prerelease: false
+   type: :development
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+ - !ruby/object:Gem::Dependency
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+   name: logstash-codec-json
+   prerelease: false
+   type: :development
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
  description: This gem is a logstash plugin required to be installed on top of the
    Logstash core pipeline using $LS_HOME/bin/logstash-plugin install 'logstash-output-charrington'.
    This gem is not a stand-alone program
@@ -118,6 +146,12 @@ files:
  - lib/com/zaxxer/HikariCP/2.7.2/HikariCP-2.7.2.jar
  - lib/logstash-output-charrington_jars.rb
  - lib/logstash/outputs/charrington.rb
+ - lib/logstash/outputs/charrington/alter_table.rb
+ - lib/logstash/outputs/charrington/create_table.rb
+ - lib/logstash/outputs/charrington/insert.rb
+ - lib/logstash/outputs/charrington/process.rb
+ - lib/logstash/outputs/charrington/service.rb
+ - lib/logstash/outputs/charrington/transform.rb
  - lib/org/apache/logging/log4j/log4j-api/2.6.2/log4j-api-2.6.2.jar
  - lib/org/apache/logging/log4j/log4j-slf4j-impl/2.6.2/log4j-slf4j-impl-2.6.2.jar
  - lib/org/slf4j/slf4j-api/1.7.25/slf4j-api-1.7.25.jar