logstash-output-analyticdb 5.4.0.10 → 5.4.0.11
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED

@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 27a21842e6bfba6088e5e56ece9763a7ef3288eb135ccf0689bd632d2784c676
+  data.tar.gz: 772b83405d4981e98bb8685490591ee100a6a4f9c02a55b98e0635c365f4dc26
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: b41bb657102f6a2512ac3f503187f55f42219db01ee9fd0f3f2d7e6bfe93d35d70aff2a1634e7908d58d8a72256ab8994e2074b1ce5a3609228ba5a0a9fbab25
+  data.tar.gz: 725a27df6db012727913bced51dba68a7c2fdb642483f637a5fdbc85be19014b7a0d825b22ef5cbb5c7bf24e04ab5d4b38e485fcfa1e7e3b566576318e10029d
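
To check a local copy of this release against the new SHA256 values above (fetched, for example, with `gem fetch logstash-output-analyticdb -v 5.4.0.11`), note that a `.gem` file is a tar archive whose `metadata.gz` and `data.tar.gz` entries are exactly what checksums.yaml describes. A minimal sketch; the local file name is an assumption:

```ruby
require "digest"
require "rubygems/package"

# A .gem is a tar archive; hash its metadata.gz and data.tar.gz entries
# and compare the output against the SHA256 section of checksums.yaml.
File.open("logstash-output-analyticdb-5.4.0.11.gem", "rb") do |io|
  Gem::Package::TarReader.new(io).each do |entry|
    next unless %w[metadata.gz data.tar.gz].include?(entry.full_name)
    puts "#{entry.full_name}: #{Digest::SHA256.hexdigest(entry.read)}"
  end
end
```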

data/README.md CHANGED

@@ -6,14 +6,6 @@ This plugin is provided as an external plugin and is not part of the Logstash pr
 This plugin allows you to output to AnalyticDB database, using JDBC adapters.
 See below for tested adapters, and example configurations.
 
-## Support & release schedule
-I no longer have time at work to maintain this plugin in step with Logstash's releases, and I am not completely immersed in the Logstash ecosystem. If something is broken for you I will do my best to help, but I cannot guarantee timeframes.
-
-Pull requests are always welcome.
-
-## Changelog
-See CHANGELOG.md
-
 ## Versions
 Released versions are available via rubygems, and typically tagged.

lib/logstash/outputs/analyticdb.rb CHANGED

@@ -217,8 +217,6 @@ class LogStash::Outputs::Analyticdb < LogStash::Outputs::Base
     connection = nil
     statement = nil
     events_to_retry = []
-    insert_sql = ""
-    sql_len = 0
     is_insert_err = false
 
     begin
@@ -231,7 +229,11 @@ class LogStash::Outputs::Analyticdb < LogStash::Outputs::Base
     end
 
     begin
-      events.each do |event|
+      pos = 0
+      insert_sql = ""
+      @logger.debug("events size: #{events.size}")
+      while pos < events.size do
+        event = events[pos]
         statement = connection.prepareStatement(
           (@unsafe_statement == true) ? event.sprintf(@statement[0]) : @statement[0]
         )
@@ -239,24 +241,45 @@ class LogStash::Outputs::Analyticdb < LogStash::Outputs::Base
         statement = add_statement_event_params(statement, event) if @statement.length > 1
         stmt_str = statement.toString
         one_sql = stmt_str[stmt_str.index(": ") + 2, stmt_str.length]
-        if sql_len + one_sql.length >= @commit_size
+        # on duplicate key start pos
+        on_duplicate_pos = one_sql.downcase.index(/on(\s+)duplicate/)
+        if on_duplicate_pos == nil
+          batch_insert_values_end_pos = one_sql.length
+        else
+          batch_insert_values_end_pos = on_duplicate_pos
+        end
+        @logger.debug("one_sql: #{one_sql}")
+        # trigger batch insert
+        if insert_sql.length + one_sql.length >= @commit_size
+          if insert_sql.length == 0
+            insert_sql = one_sql[0, batch_insert_values_end_pos]
+          end
+          if batch_insert_values_end_pos != one_sql.length
+            insert_sql.concat(one_sql[batch_insert_values_end_pos, one_sql.length - batch_insert_values_end_pos])
+          end
+          @logger.debug("batch 1 insert sql: #{insert_sql}")
           statement.execute(insert_sql)
-          sql_len = 0
           insert_sql = ""
         end
-        if sql_len == 0
-          insert_sql = one_sql
-          sql_len = one_sql.length
+        if insert_sql.length == 0
+          insert_sql = one_sql[0, batch_insert_values_end_pos]
         else
-          insert_sql
-
+          insert_sql = insert_sql.rstrip
+          insert_sql.concat(", ").concat(one_sql[@pre_len, batch_insert_values_end_pos - @pre_len])
+        end
+        # loop to end
+        if pos == events.size - 1
+          if batch_insert_values_end_pos != one_sql.length
+            insert_sql.concat(one_sql[batch_insert_values_end_pos, one_sql.length - batch_insert_values_end_pos])
+          end
+          @logger.debug("batch 2 insert sql: #{insert_sql}")
+          statement.execute(insert_sql)
         end
+        pos += 1
       rescue => e
         retry_exception?(e, event.to_json())
-        is_insert_err = true
       end
     end
-    statement.execute(insert_sql)
   rescue => e
     @logger.error("Submit data error, sql is #{insert_sql}, error is #{e}")
     is_insert_err = true
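
The new loop builds one multi-row INSERT instead of executing row by row: each per-event statement is split at its ON DUPLICATE KEY clause (`batch_insert_values_end_pos`), the VALUES tuple of every subsequent row is appended after stripping the shared prefix (`@pre_len` is evidently the length of the `INSERT ... VALUES ` prefix), and the suffix is re-attached just before each execute. A standalone sketch of that string surgery, with illustrative SQL (the plugin derives `one_sql` from the prepared statement, as above):

```ruby
# Sketch of the new batching: fold several single-row INSERT ... ON DUPLICATE KEY
# statements into one multi-row INSERT. The sample SQL is illustrative;
# pre_len plays the role of the plugin's @pre_len (length of the shared prefix).
pre = "INSERT INTO logs (id, msg) VALUES "
suffix = " ON DUPLICATE KEY UPDATE msg = VALUES(msg)"
pre_len = pre.length

one_sqls = [
  "#{pre}(1, 'a')#{suffix}",
  "#{pre}(2, 'b')#{suffix}",
  "#{pre}(3, 'c')#{suffix}",
]

insert_sql = ""
one_sqls.each_with_index do |one_sql, pos|
  # Everything before the ON DUPLICATE KEY clause is the insertable part.
  on_duplicate_pos = one_sql.downcase.index(/on(\s+)duplicate/)
  values_end = on_duplicate_pos.nil? ? one_sql.length : on_duplicate_pos

  if insert_sql.length == 0
    insert_sql = one_sql[0, values_end]  # first row keeps the full prefix
  else
    insert_sql = insert_sql.rstrip       # drop the trailing space before ", "
    insert_sql.concat(", ").concat(one_sql[pre_len, values_end - pre_len])
  end

  # On the last row, re-attach the ON DUPLICATE KEY suffix (the plugin
  # would then flush with statement.execute(insert_sql)).
  if pos == one_sqls.size - 1 && values_end != one_sql.length
    insert_sql.concat(one_sql[values_end, one_sql.length - values_end])
  end
end

puts insert_sql
# INSERT INTO logs (id, msg) VALUES (1, 'a'), (2, 'b'), (3, 'c') ON DUPLICATE KEY UPDATE msg = VALUES(msg)
```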
@@ -313,8 +336,7 @@ class LogStash::Outputs::Analyticdb < LogStash::Outputs::Base
         @logger.error("JDBC - max_flush_exceptions has been reached. #{submit_actions.length} events have been unable to be sent to SQL and are being skipped. See previously logged exceptions for details.")
         break
       end
-      raise "JDBC - max_flush_exceptions #{max_flush_exceptions} has been reached. #{submit_actions.length} events have been unable to be sent to SQL and are
-      break
+      raise "JDBC - max_flush_exceptions #{max_flush_exceptions} has been reached. #{submit_actions.length} events have been unable to be sent to SQL and are crashed. See previously logged exceptions for details."
     end
   end
 
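Worth noting: the `break` after the old `raise` was unreachable (the raise already aborts the flush loop), so the new code folds everything into a single, fully worded raise.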
@@ -409,4 +431,4 @@ class LogStash::Outputs::Analyticdb < LogStash::Outputs::Base
     doubled = current_interval * 2
     doubled > @retry_max_interval ? @retry_max_interval : doubled
   end
-end # class LogStash::Outputs::analyticdb
+end # class LogStash::Outputs::analyticdb
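
The `-`/`+` pair above is textually identical, which usually indicates a trailing-newline fix at end of file. For context, the helper it closes implements capped exponential backoff; a standalone sketch with illustrative values (the plugin reads the real ones from its retry settings):

```ruby
# Capped exponential backoff, mirroring the helper above: each retry doubles
# the sleep interval until it pins at the configured maximum.
retry_max_interval = 16  # illustrative; the plugin uses @retry_max_interval
interval = 2             # illustrative starting interval

6.times do |attempt|
  puts "retry #{attempt + 1}: sleeping #{interval}s"
  doubled = interval * 2
  interval = doubled > retry_max_interval ? retry_max_interval : doubled
end
# prints 2, 4, 8, 16, 16, 16
```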

logstash-output-analyticdb.gemspec CHANGED

@@ -1,6 +1,6 @@
 Gem::Specification.new do |s|
   s.name = 'logstash-output-analyticdb'
-  s.version = '5.4.0.10'
+  s.version = '5.4.0.11'
   s.licenses = ['Apache License (2.0)']
   s.summary = 'This plugin allows you to output to SQL, via JDBC'
   s.description = "This gem is a logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install 'logstash-output-analyticdb'. This gem is not a stand-alone program"

Binary file
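As the description notes, the gem is not standalone; to pin a Logstash node to this exact release, the plugin manager's versioned install is `bin/logstash-plugin install --version 5.4.0.11 logstash-output-analyticdb`.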

metadata CHANGED

@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: logstash-output-analyticdb
 version: !ruby/object:Gem::Version
-  version: 5.4.0.10
+  version: 5.4.0.11
 platform: ruby
 authors:
 - the_angry_angel
 autorequire:
 bindir: bin
 cert_chain: []
-date:
+date: 2021-06-11 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   requirement: !ruby/object:Gem::Requirement
@@ -125,8 +125,8 @@ files:
 - vendor/jar-dependencies/runtime-jars/HikariCP-2.7.2.jar
 - vendor/jar-dependencies/runtime-jars/log4j-api-2.6.2.jar
 - vendor/jar-dependencies/runtime-jars/log4j-slf4j-impl-2.6.2.jar
-- vendor/jar-dependencies/runtime-jars/mysql-connector-java-5.1.36.jar
 - vendor/jar-dependencies/runtime-jars/slf4j-api-1.7.25.jar
+- vendor/jar/jdbc/mysql-connector-java-8.0.11.jar
 homepage: https://github.com/wuchase/logstash-output-analyticdb
 licenses:
 - Apache License (2.0)
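The file-list change above upgrades the bundled MySQL driver from Connector/J 5.1.36 to 8.0.11, now vendored under vendor/jar/jdbc/. Connector/J 8 registers itself as com.mysql.cj.jdbc.Driver (the legacy com.mysql.jdbc.Driver name still resolves, with a deprecation warning) and requires Java 8 or newer, so configurations that pin the old driver class may need updating.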
@@ -151,7 +151,7 @@ requirements:
 - jar 'com.zaxxer:HikariCP', '2.7.2'
 - jar 'org.apache.logging.log4j:log4j-slf4j-impl', '2.6.2'
 rubyforge_project:
-rubygems_version: 2.7.
+rubygems_version: 2.7.10
 signing_key:
 specification_version: 4
 summary: This plugin allows you to output to SQL, via JDBC

Binary file