logstash-output-charrington 0.3.24 → 0.3.25
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/Gemfile +6 -4
- data/lib/logstash/outputs/charrington/alter_postgres_table.rb +41 -41
- data/lib/logstash/outputs/charrington/alter_redshift_table.rb +50 -49
- data/lib/logstash/outputs/charrington/create_postgres_table.rb +33 -32
- data/lib/logstash/outputs/charrington/create_redshift_table.rb +34 -32
- data/lib/logstash/outputs/charrington/insert.rb +64 -64
- data/lib/logstash/outputs/charrington/process.rb +23 -19
- data/lib/logstash/outputs/charrington/service.rb +4 -0
- data/lib/logstash/outputs/charrington/transform_postgres.rb +10 -6
- data/lib/logstash/outputs/charrington/transform_redshift.rb +40 -36
- data/lib/logstash/outputs/charrington.rb +32 -37
- data/lib/logstash-output-charrington_jars.rb +2 -1
- data/logstash-output-charrington.gemspec +9 -9
- data/spec/charrington_spec_helper.rb +30 -35
- data/spec/{logstash-output-charrington_test_jars.rb → logstash_output_charrington_test_jars.rb} +2 -1
- data/spec/outputs/charrington_mysql_spec.rb +1 -0
- data/spec/outputs/charrington_postgres_spec.rb +1 -1
- data/spec/outputs/charrington_spec.rb +33 -29
- metadata +35 -57
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: bbc89a196e03294552a4b25fef760461874c3eb8fad194845e127cc517d1a53f
+  data.tar.gz: a330a0c2c09a9d4c8746244e3320e76ce121bfee85e97304ddebf87deccf9b31
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: e206bed7a803e0f999931fdddc0f95769cab0ed47ac9813ae27934decf695de7f1a5d38f95f01168c3d8ae6eede046c5b247a31a1efb8a6919ec34cc32da6905
+  data.tar.gz: d94bcd0d15c6fe32bcaa937f3a78231f592e2dffd90217e8a8635faaef3747c121989e8552ae66d5a019be9cf3e5ba066c375922608f25a332dcaf596e5efa20
data/Gemfile
CHANGED
@@ -1,11 +1,13 @@
+# frozen_string_literal: true
+
 source 'https://rubygems.org'
 
 gemspec
 
-logstash_path = ENV[
-use_logstash_source = ENV[
+logstash_path = ENV['LOGSTASH_PATH'] || '../../logstash'
+use_logstash_source = ENV['LOGSTASH_SOURCE'] && ENV['LOGSTASH_SOURCE'].to_s == '1'
 
 if Dir.exist?(logstash_path) && use_logstash_source
-  gem 'logstash-core', :
-  gem 'logstash-core-plugin-api', :
+  gem 'logstash-core', path: "#{logstash_path}/logstash-core"
+  gem 'logstash-core-plugin-api', path: "#{logstash_path}/logstash-core-plugin-api"
 end
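For context on the Gemfile change: the local Logstash checkout is only used when both the LOGSTASH_SOURCE flag is set to 1 and the directory named by LOGSTASH_PATH actually exists. A minimal standalone sketch of that decision (illustrative only; the printed messages are not part of the plugin):

```ruby
# Sketch of the new Gemfile logic, runnable outside Bundler for illustration.
logstash_path = ENV['LOGSTASH_PATH'] || '../../logstash'
use_logstash_source = ENV['LOGSTASH_SOURCE'] && ENV['LOGSTASH_SOURCE'].to_s == '1'

if Dir.exist?(logstash_path) && use_logstash_source
  puts "would resolve logstash-core from #{logstash_path}/logstash-core"
else
  puts 'would resolve logstash-core and logstash-core-plugin-api from rubygems.org'
end
```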
data/lib/logstash/outputs/charrington/alter_postgres_table.rb
CHANGED
@@ -1,10 +1,11 @@
-
+# frozen_string_literal: true
+
+require File.join(File.dirname(__FILE__), 'service')
 
 module Charrington
+  # This service will add columns to an existing table dynamically based on finding new keys in the JSON structure.
+  # This is potentially called from Insert when an insert fails.
   class AlterPostgresTable
-    # This service will add columns to an existing table dynamically based on finding new keys in the JSON structure.
-    # This is potentially called from Insert when an insert fails.
-
     include Service
     include LogStash::Util::Loggable
     attr_reader :connection, :event, :table_name, :columns, :schema

@@ -26,7 +27,7 @@ module Charrington
       set_column_types
       alter_table
       true
-    rescue => e
+    rescue StandardError => e
       raise AlterFailed, e.message
     ensure
       @column_types.clear if @column_types.is_a? Array

@@ -41,69 +42,68 @@ module Charrington
     def columns_fragment
       column_types.map do |column|
         "ADD COLUMN IF NOT EXISTS #{column}"
-      end.join(
+      end.join(',')
     end
 
     def set_column_types
-      (columns - current_table_columns).each_with_index do |key,
-      [removed lines not captured in this view]
-        end
+      (columns - current_table_columns).each_with_index do |key, _idx|
+        column_types << case event[key]
+                        when Time, LogStash::Timestamp
+                          "#{key} TIMESTAMP"
+                        when Date
+                          "#{key} DATE"
+                        when Integer
+                          "#{key} BIGINT"
+                        when BigDecimal
+                          "#{key} DECIMAL"
+                        when Float
+                          "#{key} DOUBLE PRECISION"
+                        when true, false
+                          "#{key} BOOLEAN"
+                        else
+                          "#{key} VARCHAR"
+                        end
       end
     end
 
     def current_table_columns
       sql = "SELECT * FROM #{schema}#{table_name} LIMIT 1;"
-      stmt, rs =
-      meta_data = rs.getMetaData
-      column_count = meta_data.getColumnCount
-      (1..column_count).map {|i| meta_data.getColumnName(i) }
+      stmt, rs = execute_query(prep_sql(sql))
+      meta_data = rs.getMetaData
+      column_count = meta_data.getColumnCount
+      (1..column_count).map { |i| meta_data.getColumnName(i) }
     ensure
-      stmt
+      stmt&.close
     end
 
     def execute(sql)
       stmt = connection.prepareStatement(prep_sql(sql))
-      stmt.execute
+      stmt.execute
     rescue Java::OrgPostgresqlUtil::PSQLException => e
-
+      logger.error "PSQLException: #{e.message}"
     ensure
-      stmt
+      stmt&.close
     end
 
-    def
-      stmt = connection.createStatement
+    def execute_query(sql)
+      stmt = connection.createStatement
       # only close the statement if something goes wrong
       # otherwise, the caller is responsible for closing the
       # statement when they are doen with the result set
-
+      [stmt, stmt.execute_query(prep_sql(sql))]
     rescue Java::OrgPostgresqlUtil::PSQLException => e
       puts "PSQLException: #{e.message}"
-
-      stmt
+      logger.info "PSQLException: #{e.message}"
+      stmt&.close
       # @logger.error("#{e.message}")
-    rescue => e
+    rescue StandardError => e
       puts "Unknown exception: #{e.message}"
-
-      stmt
+      logger.info "Unknown exception: #{e.message}"
+      stmt&.close
     end
 
     def prep_sql(sql)
-      sql.gsub(/\s+/,
+      sql.gsub(/\s+/, ' ').strip
     end
   end
 end
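The net effect of the Postgres alter path is easier to see with a concrete event. The sketch below is standalone and hypothetical: the event hash and table name are made up, the ALTER TABLE wrapper around columns_fragment is assumed (only the fragment itself appears in the hunk), and the LogStash::Timestamp branch is omitted so the snippet runs outside Logstash. The type mapping and the comma-joined ADD COLUMN IF NOT EXISTS clauses mirror set_column_types and columns_fragment above. Note also the recurring stmt&.close change: Ruby's safe-navigation operator calls close only when the statement is non-nil.

```ruby
require 'bigdecimal'
require 'date'

# Hypothetical new keys discovered in an event but missing from the table.
event = {
  'duration_ms' => 42,
  'price'       => BigDecimal('9.99'),
  'shipped_on'  => Date.new(2020, 1, 1),
  'note'        => 'free-text'
}

# Same case/when mapping as set_column_types above (LogStash::Timestamp branch omitted).
column_types = event.keys.map do |key|
  case event[key]
  when Time        then "#{key} TIMESTAMP"
  when Date        then "#{key} DATE"
  when Integer     then "#{key} BIGINT"
  when BigDecimal  then "#{key} DECIMAL"
  when Float       then "#{key} DOUBLE PRECISION"
  when true, false then "#{key} BOOLEAN"
  else                  "#{key} VARCHAR"
  end
end

# columns_fragment joins one ADD COLUMN IF NOT EXISTS clause per column with commas.
fragment = column_types.map { |c| "ADD COLUMN IF NOT EXISTS #{c}" }.join(',')

# The ALTER TABLE prefix is assumed here; only the fragment is defined in the hunk above.
puts "ALTER TABLE public.events #{fragment}"
# => ALTER TABLE public.events ADD COLUMN IF NOT EXISTS duration_ms BIGINT,ADD COLUMN IF NOT EXISTS price DECIMAL,ADD COLUMN IF NOT EXISTS shipped_on DATE,ADD COLUMN IF NOT EXISTS note VARCHAR
```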
data/lib/logstash/outputs/charrington/alter_redshift_table.rb
CHANGED
@@ -1,10 +1,11 @@
-
+# frozen_string_literal: true
+
+require File.join(File.dirname(__FILE__), 'service')
 
 module Charrington
+  # This service will add columns to an existing table dynamically based on finding new keys in the JSON structure.
+  # This is potentially called from Insert when an insert fails.
   class AlterRedshiftTable
-    # This service will add columns to an existing table dynamically based on finding new keys in the JSON structure.
-    # This is potentially called from Insert when an insert fails.
-
     include Service
     include LogStash::Util::Loggable
     attr_reader :connection, :event, :table_name, :columns, :schema

@@ -26,7 +27,7 @@ module Charrington
       set_column_types
       alter_table
       true
-    rescue => e
+    rescue StandardError => e
       raise AlterFailed, e.message
     ensure
       @column_types.clear if @column_types.is_a? Array

@@ -35,89 +36,89 @@ module Charrington
     private
 
     def alter_table
-      execute_list(
+      execute_list(list_of_alter_table_stmts)
     end
 
-    def
+    def list_of_alter_table_stmts
       column_types.map do |column|
         "ALTER TABLE #{schema}#{table_name} ADD COLUMN #{column}"
       end
     end
 
     def set_column_types
-      (columns - current_table_columns).each_with_index do |key,
-
-
-        case event[key]
-      [removed lines not captured in this view]
+      (columns - current_table_columns).each_with_index do |key, _idx|
+        logger.info "New column: #{key}, because of event: #{event}"
+
+        column_types << case event[key]
+                        when Time, LogStash::Timestamp
+                          "#{key} TIMESTAMP"
+                        when Date
+                          "#{key} DATE"
+                        when Integer
+                          "#{key} BIGINT"
+                        when BigDecimal
+                          "#{key} DECIMAL"
+                        when Float
+                          "#{key} DOUBLE PRECISION"
+                        when true, false
+                          "#{key} BOOLEAN"
+                        else
+                          "#{key} VARCHAR(512)"
+                        end
       end
     end
 
     def current_table_columns
       sql = "SELECT * FROM #{schema}#{table_name} LIMIT 1;"
-      stmt, rs =
-      meta_data = rs.getMetaData
-      column_count = meta_data.getColumnCount
-      (1..column_count).map {|i| meta_data.getColumnName(i) }
+      stmt, rs = execute_query(prep_sql(sql))
+      meta_data = rs.getMetaData
+      column_count = meta_data.getColumnCount
+      (1..column_count).map { |i| meta_data.getColumnName(i) }
     ensure
-
-      stmt
+      logger.info "Within ensure block of current_table_columns in alter_redshift_table.rb and value of stmt.nil?: #{stmt.nil?}"
+      stmt&.close
     end
 
     def execute_list(list_of_sql_stmts)
-
+      logger.info "Received list of sql statments to execute: #{list_of_sql_stmts}"
 
       list_of_sql_stmts.each_with_index do |sql, idx|
-
+        logger.info "Executing ALTER TABLE statement with index #{idx} and sql of: #{sql}"
         execute(sql)
       end
     end
 
     def execute(sql)
       stmt = connection.prepareStatement(prep_sql(sql))
-      stmt.execute
+      stmt.execute
     rescue Java::JavaSql::SQLException => e
-
-    rescue => e
-
+      logger.error "Alter Redshift SQLException: #{e.message}, with SQL: #{sql}"
+    rescue StandardError => e
+      logger.error "Alter Redshift Unknown exception: #{e.message}, with SQL: #{sql}"
     ensure
-
-      stmt
+      logger.error "Within ensure block of execute in alter_redshift_table.rb and value of stmt.nil?: #{stmt.nil?}"
+      stmt&.close
     end
 
-    def
-      stmt = connection.createStatement
+    def execute_query(sql)
+      stmt = connection.createStatement
       # only close the statement if something goes wrong
       # otherwise, the caller is responsible for closing the
       # statement when they are doen with the result set
-
+      [stmt, stmt.execute_query(prep_sql(sql))]
    rescue Java::JavaSql::SQLException => e
       puts "execute query SQLException: #{e.message}"
-
-      stmt
+      logger.info "execute query SQLException: #{e.message}, with SQL: #{sql}"
+      stmt&.close
       # @logger.error("#{e.message}")
-    rescue => e
+    rescue StandardError => e
       puts "execute query Unknown exception: #{e.message}"
-
-      stmt
+      logger.info "execute query Unknown exception: #{e.message}, with SQL: #{sql}"
+      stmt&.close
     end
 
     def prep_sql(sql)
-      sql.gsub(/\s+/,
+      sql.gsub(/\s+/, ' ').strip
     end
   end
 end
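Two differences from the Postgres path stand out in this file: list_of_alter_table_stmts emits one ALTER TABLE statement per new column instead of a single comma-joined statement, and the fallback string type is VARCHAR(512) rather than a bare VARCHAR. A small sketch of the statements that would be generated (schema, table name, and column list are hypothetical; the statement template is taken verbatim from list_of_alter_table_stmts above):

```ruby
# Hypothetical inputs: column definitions already produced by set_column_types.
column_types = ['duration_ms BIGINT', 'note VARCHAR(512)']
schema       = 'analytics.'   # the plugin interpolates "#{schema}#{table_name}"
table_name   = 'events'

# Mirrors list_of_alter_table_stmts: one ALTER TABLE statement per column,
# each executed individually by execute_list/execute.
statements = column_types.map do |column|
  "ALTER TABLE #{schema}#{table_name} ADD COLUMN #{column}"
end

puts statements
# => ALTER TABLE analytics.events ADD COLUMN duration_ms BIGINT
# => ALTER TABLE analytics.events ADD COLUMN note VARCHAR(512)
```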
data/lib/logstash/outputs/charrington/create_postgres_table.rb
CHANGED
@@ -1,11 +1,12 @@
-
+# frozen_string_literal: true
+
+require File.join(File.dirname(__FILE__), 'service')
 
 module Charrington
+  # This service will create a table dynamically based on the JSON structure.
+  # This is potentially called from Insert when an insert fails.
   class CreatePostgresTable
-
-    # This is potentially called from Insert when an insert fails.
-
-    @@timestamp_columns = %w(published_at sent_at original_timestamp received_at timestamp)
+    TIMESTAMP_COLUMNS = %w[published_at sent_at original_timestamp received_at timestamp].freeze
 
     include Service
     include LogStash::Util::Loggable

@@ -15,7 +16,7 @@ module Charrington
     Error = Class.new(StandardError)
     CreateFailed = Class.new(Error)
 
-    def initialize(connection, event, schema, table_name, columns, opts = {})
+    def initialize(connection, event, schema, table_name, columns, opts = {}) # rubocop:disable Metrics/ParameterLists
       @connection = connection
       @event = event.to_hash
       @table_name = table_name

@@ -29,7 +30,7 @@ module Charrington
       set_column_types
       create_table
       true
-    rescue => e
+    rescue StandardError => e
       raise CreateFailed, e.message
     ensure
       @column_types.clear if @column_types.is_a? Array

@@ -39,39 +40,39 @@ module Charrington
 
     def set_column_types
       columns.each do |column|
-        if
+        if TIMESTAMP_COLUMNS.include?(column)
           column_types << "#{column} TIMESTAMP"
           next
         end
 
-        case event[column]
-        [removed lines not captured in this view]
+        column_types << case event[column]
+                        when Time, LogStash::Timestamp
+                          "#{column} TIMESTAMP"
+                        when Date
+                          "#{column} DATE"
+                        when Integer
+                          "#{column} BIGINT"
+                        when BigDecimal
+                          "#{column} DECIMAL"
+                        when Float
+                          "#{column} DOUBLE PRECISION"
+                        when true, false
+                          "#{column} BOOLEAN"
+                        else
+                          "#{column} VARCHAR(512)"
+                        end
       end
     end
 
     def initial_columns
-      if transformer ==
+      if transformer == 'postgres'
         [
-
-
+          'id SERIAL PRIMARY KEY',
+          'inserted_at TIMESTAMP DEFAULT NOW()'
         ]
       else
         [
-
+          'uuid_ts TIMESTAMP DEFAULT NOW()'
         ]
       end
     end

@@ -81,12 +82,12 @@ module Charrington
     end
 
     def execute(sql)
-      statement = connection.prepareStatement(
-      statement.execute
+      statement = connection.prepareStatement(sql.gsub(/\s+/, ' ').strip)
+      statement.execute
     rescue Java::OrgPostgresqlUtil::PSQLException => e
-
+      logger.error "PSQLException: #{e.message} sql=#{sql}"
     ensure
-      statement
+      statement&.close
     end
   end
 end
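For the create path, the columns named in TIMESTAMP_COLUMNS are always typed TIMESTAMP, everything else falls through to the same value-based mapping, and the postgres transformer prepends id and inserted_at columns via initial_columns. A standalone sketch of the column definitions that would result for a hypothetical event (the CREATE TABLE statement itself is built elsewhere in the class and is not part of the hunks shown; the BigDecimal and LogStash::Timestamp branches are omitted so the snippet runs on its own):

```ruby
require 'date'

TIMESTAMP_COLUMNS = %w[published_at sent_at original_timestamp received_at timestamp].freeze

# Hypothetical flattened event; the real class works on event.to_hash from Logstash.
event = { 'received_at' => Time.now, 'user_id' => 7, 'active' => true, 'email' => 'user@example.com' }

column_types = event.keys.map do |column|
  next "#{column} TIMESTAMP" if TIMESTAMP_COLUMNS.include?(column)

  case event[column]
  when Time        then "#{column} TIMESTAMP"
  when Date        then "#{column} DATE"
  when Integer     then "#{column} BIGINT"
  when Float       then "#{column} DOUBLE PRECISION"
  when true, false then "#{column} BOOLEAN"
  else                  "#{column} VARCHAR(512)"
  end
end

# initial_columns for the 'postgres' transformer; other transformers get uuid_ts instead.
initial = ['id SERIAL PRIMARY KEY', 'inserted_at TIMESTAMP DEFAULT NOW()']

all_columns = initial + column_types
puts all_columns.join(",\n")
# => id SERIAL PRIMARY KEY,
#    inserted_at TIMESTAMP DEFAULT NOW(),
#    received_at TIMESTAMP,
#    user_id BIGINT,
#    active BOOLEAN,
#    email VARCHAR(512)
```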
data/lib/logstash/outputs/charrington/create_redshift_table.rb
CHANGED
@@ -1,10 +1,11 @@
-
+# frozen_string_literal: true
+
+require File.join(File.dirname(__FILE__), 'service')
 
 module Charrington
+  # This service will create a table dynamically based on the JSON structure.
+  # This is potentially called from Insert when an insert fails.
   class CreateRedshiftTable
-    # This service will create a table dynamically based on the JSON structure.
-
-
     include Service
     include LogStash::Util::Loggable
     attr_reader :connection, :event, :table_name, :columns, :schema, :opts

@@ -13,7 +14,7 @@ module Charrington
     Error = Class.new(StandardError)
     CreateFailed = Class.new(Error)
 
-    def initialize(connection, event, schema, table_name, columns,
+    def initialize(connection, event, schema, table_name, columns, _opts = {}) # rubocop:disable Metrics/ParameterLists
       @connection = connection
       @event = event.to_hash
       @schema = schema

@@ -24,10 +25,10 @@ module Charrington
 
     def call
       set_column_types
-
+      logger.info "Finished running set_column_types and now have column_types for create table of: #{column_types}"
       create_table
       true
-    rescue => e
+    rescue StandardError => e
       raise CreateFailed, e.message
     ensure
       @column_types.clear if @column_types.is_a? Array

@@ -36,31 +37,32 @@ module Charrington
     private
 
     # https://docs.aws.amazon.com/redshift/latest/dg/r_CREATE_TABLE_NEW.html
-    def set_column_types
+    def set_column_types # rubocop:disable Metrics/CyclomaticComplexity
       columns.each do |column|
-
+        case column
+        when 'id'
           column_types << "#{column} VARCHAR(512) NOT NULL distkey CONSTRAINT #{table_name}_pkey primary key"
           next
-
+        when 'sent_at'
           column_types << "#{column} TIMESTAMP"
           next
         end
-        case event[column]
-        [removed lines not captured in this view]
+        column_types << case event[column]
+                        when Time, LogStash::Timestamp
+                          "#{column} TIMESTAMP"
+                        when Date
+                          "#{column} DATE"
+                        when Integer
+                          "#{column} BIGINT"
+                        when BigDecimal
+                          "#{column} DECIMAL"
+                        when Float
+                          "#{column} DOUBLE PRECISION"
+                        when true, false
+                          "#{column} BOOLEAN"
+                        else
+                          "#{column} VARCHAR(512)"
+                        end
       end
     end
 

@@ -79,15 +81,15 @@ module Charrington
     end
 
     def execute(sql)
-
+      logger.info "Running sql of: #{sql}"
 
-      statement = connection.prepareStatement(
-      statement.execute
+      statement = connection.prepareStatement(sql.gsub(/\s+/, ' ').strip)
+      statement.execute
     rescue Java::JavaSql::SQLException => e
-
+      logger.info "Redshift SQLException: #{e.message}, with SQL: #{sql}"
     ensure
-
-      statement
+      logger.info "Within ensure block of create_redshift_table.rb and value of statement.nil?: #{statement.nil?}"
+      statement&.close
     end
   end
 end
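The Redshift create path differs from the Postgres one mainly in its hard-coded special cases: an id column becomes the distribution key and primary key, sent_at is forced to TIMESTAMP, and strings default to VARCHAR(512). A standalone sketch of the resulting column definitions (event and table name are hypothetical; the id and sent_at templates are copied from the hunk above, and the value-based branches are shortened to the ones this example hits):

```ruby
# Hypothetical event; the real class receives event.to_hash from Logstash.
event      = { 'id' => 'abc-123', 'sent_at' => Time.now, 'plan' => 'trial' }
table_name = 'events'

column_types = event.keys.map do |column|
  case column
  when 'id'
    "#{column} VARCHAR(512) NOT NULL distkey CONSTRAINT #{table_name}_pkey primary key"
  when 'sent_at'
    "#{column} TIMESTAMP"
  else
    # Shortened value-based mapping; the full class also handles Date, Integer,
    # BigDecimal, Float, and booleans as shown above.
    event[column].is_a?(Time) ? "#{column} TIMESTAMP" : "#{column} VARCHAR(512)"
  end
end

puts column_types
# => id VARCHAR(512) NOT NULL distkey CONSTRAINT events_pkey primary key
# => sent_at TIMESTAMP
# => plan VARCHAR(512)
```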