logstash-output-analyticdb 5.4.0.6 → 5.4.0.11
Sign up to get free protection for your applications and to get access to all the features.
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
|
|
1
1
|
---
|
2
2
|
SHA256:
|
3
|
-
metadata.gz:
|
4
|
-
data.tar.gz:
|
3
|
+
metadata.gz: 27a21842e6bfba6088e5e56ece9763a7ef3288eb135ccf0689bd632d2784c676
|
4
|
+
data.tar.gz: 772b83405d4981e98bb8685490591ee100a6a4f9c02a55b98e0635c365f4dc26
|
5
5
|
SHA512:
|
6
|
-
metadata.gz:
|
7
|
-
data.tar.gz:
|
6
|
+
metadata.gz: b41bb657102f6a2512ac3f503187f55f42219db01ee9fd0f3f2d7e6bfe93d35d70aff2a1634e7908d58d8a72256ab8994e2074b1ce5a3609228ba5a0a9fbab25
|
7
|
+
data.tar.gz: 725a27df6db012727913bced51dba68a7c2fdb642483f637a5fdbc85be19014b7a0d825b22ef5cbb5c7bf24e04ab5d4b38e485fcfa1e7e3b566576318e10029d
|
data/README.md
CHANGED
@@ -6,14 +6,6 @@ This plugin is provided as an external plugin and is not part of the Logstash pr
|
|
6
6
|
This plugin allows you to output to AnalyticDB database, using JDBC adapters.
|
7
7
|
See below for tested adapters, and example configurations.
|
8
8
|
|
9
|
-
## Support & release schedule
|
10
|
-
I no longer have time at work to maintain this plugin in step with Logstash's releases, and I am not completely immersed in the Logstash ecosystem. If something is broken for you I will do my best to help, but I cannot guarantee timeframes.
|
11
|
-
|
12
|
-
Pull requests are always welcome.
|
13
|
-
|
14
|
-
## Changelog
|
15
|
-
See CHANGELOG.md
|
16
|
-
|
17
9
|
## Versions
|
18
10
|
Released versions are available via rubygems, and typically tagged.
|
19
11
|
|
@@ -94,7 +94,7 @@ class LogStash::Outputs::Analyticdb < LogStash::Outputs::Base
|
|
94
94
|
# Maximum number of sequential failed attempts, before we stop retrying.
|
95
95
|
# If set to < 1, then it will infinitely retry.
|
96
96
|
# At the default values this is a little over 10 minutes
|
97
|
-
config :max_flush_exceptions, validate: :number, default: 10
|
97
|
+
config :max_flush_exceptions, validate: :number, default: 100
|
98
98
|
|
99
99
|
config :max_repeat_exceptions, obsolete: 'This has been replaced by max_flush_exceptions - which behaves slightly differently. Please check the documentation.'
|
100
100
|
config :max_repeat_exceptions_time, obsolete: 'This is no longer required'
|
@@ -108,6 +108,8 @@ class LogStash::Outputs::Analyticdb < LogStash::Outputs::Base
|
|
108
108
|
|
109
109
|
config :commit_size, validate: :number, default: 32768
|
110
110
|
|
111
|
+
config :skip_exception, validate: :boolean, default: false
|
112
|
+
|
111
113
|
def register
|
112
114
|
@logger.info('JDBC - Starting up')
|
113
115
|
|
@@ -215,8 +217,6 @@ class LogStash::Outputs::Analyticdb < LogStash::Outputs::Base
|
|
215
217
|
connection = nil
|
216
218
|
statement = nil
|
217
219
|
events_to_retry = []
|
218
|
-
insert_sql = ""
|
219
|
-
sql_len = 0
|
220
220
|
is_insert_err = false
|
221
221
|
|
222
222
|
begin
|
@@ -229,7 +229,11 @@ class LogStash::Outputs::Analyticdb < LogStash::Outputs::Base
|
|
229
229
|
end
|
230
230
|
|
231
231
|
begin
|
232
|
-
|
232
|
+
pos = 0
|
233
|
+
insert_sql = ""
|
234
|
+
@logger.debug("events size: #{events.size}")
|
235
|
+
while pos < events.size do
|
236
|
+
event = events[pos]
|
233
237
|
statement = connection.prepareStatement(
|
234
238
|
(@unsafe_statement == true) ? event.sprintf(@statement[0]) : @statement[0]
|
235
239
|
)
|
@@ -237,23 +241,45 @@ class LogStash::Outputs::Analyticdb < LogStash::Outputs::Base
|
|
237
241
|
statement = add_statement_event_params(statement, event) if @statement.length > 1
|
238
242
|
stmt_str = statement.toString
|
239
243
|
one_sql = stmt_str[stmt_str.index(": ") + 2, stmt_str.length]
|
240
|
-
|
244
|
+
# on duplicate key start pos
|
245
|
+
on_duplicate_pos = one_sql.downcase.index(/on(\s+)duplicate/)
|
246
|
+
if on_duplicate_pos == nil
|
247
|
+
batch_insert_values_end_pos = one_sql.length
|
248
|
+
else
|
249
|
+
batch_insert_values_end_pos = on_duplicate_pos
|
250
|
+
end
|
251
|
+
@logger.debug("one_sql: #{one_sql}")
|
252
|
+
# trigger batch insert
|
253
|
+
if insert_sql.length + one_sql.length >= @commit_size
|
254
|
+
if insert_sql.length == 0
|
255
|
+
insert_sql = one_sql[0, batch_insert_values_end_pos]
|
256
|
+
end
|
257
|
+
if batch_insert_values_end_pos != one_sql.length
|
258
|
+
insert_sql.concat(one_sql[batch_insert_values_end_pos, one_sql.length - batch_insert_values_end_pos])
|
259
|
+
end
|
260
|
+
@logger.debug("batch 1 insert sql: #{insert_sql}")
|
241
261
|
statement.execute(insert_sql)
|
242
|
-
sql_len = 0
|
243
262
|
insert_sql = ""
|
244
263
|
end
|
245
|
-
if
|
246
|
-
insert_sql = one_sql
|
247
|
-
sql_len = one_sql.length
|
264
|
+
if insert_sql.length == 0
|
265
|
+
insert_sql = one_sql[0, batch_insert_values_end_pos]
|
248
266
|
else
|
249
|
-
insert_sql
|
250
|
-
|
267
|
+
insert_sql = insert_sql.rstrip
|
268
|
+
insert_sql.concat(", ").concat(one_sql[@pre_len, batch_insert_values_end_pos - @pre_len])
|
251
269
|
end
|
270
|
+
# loop to end
|
271
|
+
if pos == events.size - 1
|
272
|
+
if batch_insert_values_end_pos != one_sql.length
|
273
|
+
insert_sql.concat(one_sql[batch_insert_values_end_pos, one_sql.length - batch_insert_values_end_pos])
|
274
|
+
end
|
275
|
+
@logger.debug("batch 2 insert sql: #{insert_sql}")
|
276
|
+
statement.execute(insert_sql)
|
277
|
+
end
|
278
|
+
pos += 1
|
252
279
|
rescue => e
|
253
280
|
retry_exception?(e, event.to_json())
|
254
281
|
end
|
255
282
|
end
|
256
|
-
statement.execute(insert_sql)
|
257
283
|
rescue => e
|
258
284
|
@logger.error("Submit data error, sql is #{insert_sql}, error is #{e}")
|
259
285
|
is_insert_err = true
|
@@ -306,8 +332,11 @@ class LogStash::Outputs::Analyticdb < LogStash::Outputs::Base
|
|
306
332
|
attempts += 1
|
307
333
|
|
308
334
|
if attempts > @max_flush_exceptions
|
309
|
-
@
|
310
|
-
|
335
|
+
if (@skip_exception)
|
336
|
+
@logger.error("JDBC - max_flush_exceptions has been reached. #{submit_actions.length} events have been unable to be sent to SQL and are being skipped. See previously logged exceptions for details.")
|
337
|
+
break
|
338
|
+
end
|
339
|
+
raise "JDBC - max_flush_exceptions #{max_flush_exceptions} has been reached. #{submit_actions.length} events have been unable to be sent to SQL and are crashed. See previously logged exceptions for details."
|
311
340
|
end
|
312
341
|
end
|
313
342
|
|
@@ -370,10 +399,11 @@ class LogStash::Outputs::Analyticdb < LogStash::Outputs::Base
|
|
370
399
|
end
|
371
400
|
|
372
401
|
def retry_exception?(exception, event)
|
373
|
-
retrying = (exception.respond_to? 'getSQLState' and (RETRYABLE_SQLSTATE_CLASSES.include?(exception.getSQLState.to_s[0, 2]) or @retry_sql_states.include?(exception.getSQLState)))
|
402
|
+
# retrying = (exception.respond_to? 'getSQLState' and (RETRYABLE_SQLSTATE_CLASSES.include?(exception.getSQLState.to_s[0, 2]) or @retry_sql_states.include?(exception.getSQLState)))
|
403
|
+
retrying = true
|
374
404
|
log_jdbc_exception(exception, retrying, event)
|
375
405
|
|
376
|
-
|
406
|
+
retrying
|
377
407
|
end
|
378
408
|
|
379
409
|
def log_jdbc_exception(exception, retrying, event)
|
@@ -401,4 +431,4 @@ class LogStash::Outputs::Analyticdb < LogStash::Outputs::Base
|
|
401
431
|
doubled = current_interval * 2
|
402
432
|
doubled > @retry_max_interval ? @retry_max_interval : doubled
|
403
433
|
end
|
404
|
-
end # class LogStash::Outputs::analyticdb
|
434
|
+
end # class LogStash::Outputs::analyticdb
|
@@ -1,6 +1,6 @@
|
|
1
1
|
Gem::Specification.new do |s|
|
2
2
|
s.name = 'logstash-output-analyticdb'
|
3
|
-
s.version = '5.4.0.6'
|
3
|
+
s.version = '5.4.0.11'
|
4
4
|
s.licenses = ['Apache License (2.0)']
|
5
5
|
s.summary = 'This plugin allows you to output to SQL, via JDBC'
|
6
6
|
s.description = "This gem is a logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install 'logstash-output-analyticdb'. This gem is not a stand-alone program"
|
Binary file
|
metadata
CHANGED
@@ -1,14 +1,14 @@
|
|
1
1
|
--- !ruby/object:Gem::Specification
|
2
2
|
name: logstash-output-analyticdb
|
3
3
|
version: !ruby/object:Gem::Version
|
4
|
-
version: 5.4.0.6
|
4
|
+
version: 5.4.0.11
|
5
5
|
platform: ruby
|
6
6
|
authors:
|
7
7
|
- the_angry_angel
|
8
8
|
autorequire:
|
9
9
|
bindir: bin
|
10
10
|
cert_chain: []
|
11
|
-
date:
|
11
|
+
date: 2021-06-11 00:00:00.000000000 Z
|
12
12
|
dependencies:
|
13
13
|
- !ruby/object:Gem::Dependency
|
14
14
|
requirement: !ruby/object:Gem::Requirement
|
@@ -125,8 +125,8 @@ files:
|
|
125
125
|
- vendor/jar-dependencies/runtime-jars/HikariCP-2.7.2.jar
|
126
126
|
- vendor/jar-dependencies/runtime-jars/log4j-api-2.6.2.jar
|
127
127
|
- vendor/jar-dependencies/runtime-jars/log4j-slf4j-impl-2.6.2.jar
|
128
|
-
- vendor/jar-dependencies/runtime-jars/mysql-connector-java-5.1.36.jar
|
129
128
|
- vendor/jar-dependencies/runtime-jars/slf4j-api-1.7.25.jar
|
129
|
+
- vendor/jar/jdbc/mysql-connector-java-8.0.11.jar
|
130
130
|
homepage: https://github.com/wuchase/logstash-output-analyticdb
|
131
131
|
licenses:
|
132
132
|
- Apache License (2.0)
|
@@ -151,7 +151,7 @@ requirements:
|
|
151
151
|
- jar 'com.zaxxer:HikariCP', '2.7.2'
|
152
152
|
- jar 'org.apache.logging.log4j:log4j-slf4j-impl', '2.6.2'
|
153
153
|
rubyforge_project:
|
154
|
-
rubygems_version: 2.7.
|
154
|
+
rubygems_version: 2.7.10
|
155
155
|
signing_key:
|
156
156
|
specification_version: 4
|
157
157
|
summary: This plugin allows you to output to SQL, via JDBC
|
Binary file
|