logstash-output-google_bigquery 3.2.3 → 3.2.4
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +4 -4
- data/CHANGELOG.md +3 -0
- data/LICENSE +1 -1
- data/docs/index.asciidoc +4 -1
- data/lib/logstash/outputs/google_bigquery.rb +22 -7
- data/logstash-output-google_bigquery.gemspec +1 -1
- metadata +2 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
|
|
1
1
|
---
|
2
2
|
SHA256:
|
3
|
-
metadata.gz:
|
4
|
-
data.tar.gz:
|
3
|
+
metadata.gz: aa0d62d4409731fe59435066046e0ab5a7696f4becef0f896bb6e86cac701ae9
|
4
|
+
data.tar.gz: 4a19742786e58c9a9aa97262f25a5773e155c6b31b33d9e3760025aba45ae87a
|
5
5
|
SHA512:
|
6
|
-
metadata.gz:
|
7
|
-
data.tar.gz:
|
6
|
+
metadata.gz: 6ea1f2b8c954536561fe81aa9b8c1fcfbb06553ac455f00f45fcbc66440027f36ac57ff24bbb769a189d661922d5b9dcd0a4527115563c084e4bf2a26c80cef9
|
7
|
+
data.tar.gz: de8befd5d24ba28da45b33f0cbdb395450110eaec708484e49441c768f0231e793f23a63c00febae31b0e1579a5a9ac0a4f92dfa5de7512ddc7988a388a1ef3e
|
data/CHANGELOG.md
CHANGED
data/LICENSE
CHANGED
data/docs/index.asciidoc
CHANGED
@@ -1,5 +1,6 @@
|
|
1
1
|
:plugin: google_bigquery
|
2
2
|
:type: output
|
3
|
+
:default_codec: plain
|
3
4
|
|
4
5
|
///////////////////////////////////////////
|
5
6
|
START - GENERATED VARIABLES, DO NOT EDIT!
|
@@ -299,4 +300,6 @@ around one hour).
|
|
299
300
|
|
300
301
|
|
301
302
|
[id="plugins-{type}s-{plugin}-common-options"]
|
302
|
-
include::{include_path}/{type}.asciidoc[]
|
303
|
+
include::{include_path}/{type}.asciidoc[]
|
304
|
+
|
305
|
+
:default_codec!:
|
@@ -129,8 +129,18 @@ class LogStash::Outputs::GoogleBigQuery < LogStash::Outputs::Base
|
|
129
129
|
# }
|
130
130
|
config :json_schema, :validate => :hash, :required => false, :default => nil
|
131
131
|
|
132
|
-
# Indicates if BigQuery should
|
133
|
-
# If true, the extra values are
|
132
|
+
# Indicates if BigQuery should ignore values that are not represented in the table schema.
|
133
|
+
# If true, the extra values are discarded.
|
134
|
+
# If false, BigQuery will reject the records with extra fields and the job will fail.
|
135
|
+
# The default value is false.
|
136
|
+
#
|
137
|
+
# NOTE: You may want to add a Logstash filter like the following to remove common fields it adds:
|
138
|
+
# [source,ruby]
|
139
|
+
# ----------------------------------
|
140
|
+
# mutate {
|
141
|
+
# remove_field => ["@version","@timestamp","path","host","type", "message"]
|
142
|
+
# }
|
143
|
+
# ----------------------------------
|
134
144
|
config :ignore_unknown_values, :validate => :boolean, :default => false
|
135
145
|
|
136
146
|
# Path to private key file for Google Service Account.
|
@@ -217,6 +227,9 @@ class LogStash::Outputs::GoogleBigQuery < LogStash::Outputs::Base
|
|
217
227
|
def receive(event)
|
218
228
|
@logger.debug("BQ: receive method called", :event => event)
|
219
229
|
|
230
|
+
# TODO validate the schema if @ignore_unknown_values is off and alert the user now.
|
231
|
+
# consider creating a bad-data table to store invalid records
|
232
|
+
|
220
233
|
# Message must be written as json
|
221
234
|
message = LogStash::Json.dump(event.to_hash)
|
222
235
|
# Remove "@" from property names
|
@@ -360,7 +373,7 @@ class LogStash::Outputs::GoogleBigQuery < LogStash::Outputs::Base
|
|
360
373
|
when "DONE"
|
361
374
|
if job_status["status"].has_key?("errorResult")
|
362
375
|
@logger.error("BQ: job failed, please enable debug and check full "\
|
363
|
-
"response (
|
376
|
+
"response (the issue is probably an incompatible "\
|
364
377
|
"schema). NOT deleting local file.",
|
365
378
|
:job_id => job_id,
|
366
379
|
:filename => filename,
|
@@ -411,9 +424,13 @@ class LogStash::Outputs::GoogleBigQuery < LogStash::Outputs::Base
|
|
411
424
|
@logger.debug("BQ: reenqueue as log file is being currently appended to.",
|
412
425
|
:filename => filename)
|
413
426
|
@upload_queue << filename
|
427
|
+
|
414
428
|
# If we got here, it means that older files were uploaded, so let's
|
415
|
-
# wait
|
416
|
-
|
429
|
+
# wait before checking on this file again.
|
430
|
+
#
|
431
|
+
# Use the min so we don't wait too long if the logs start rotating too quickly
|
432
|
+
# due to an increased number of events.
|
433
|
+
sleep [60, @uploader_interval_secs].min
|
417
434
|
next
|
418
435
|
else
|
419
436
|
@logger.debug("BQ: flush and close file to be uploaded.",
|
@@ -435,8 +452,6 @@ class LogStash::Outputs::GoogleBigQuery < LogStash::Outputs::Base
|
|
435
452
|
:filename => filename)
|
436
453
|
File.delete(filename)
|
437
454
|
end
|
438
|
-
|
439
|
-
sleep @uploader_interval_secs
|
440
455
|
end
|
441
456
|
end
|
442
457
|
end
|
@@ -1,6 +1,6 @@
|
|
1
1
|
Gem::Specification.new do |s|
|
2
2
|
s.name = 'logstash-output-google_bigquery'
|
3
|
-
s.version = '3.2.3'
|
3
|
+
s.version = '3.2.4'
|
4
4
|
s.licenses = ['Apache License (2.0)']
|
5
5
|
s.summary = "Writes events to Google BigQuery"
|
6
6
|
s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
|
metadata
CHANGED
@@ -1,14 +1,14 @@
|
|
1
1
|
--- !ruby/object:Gem::Specification
|
2
2
|
name: logstash-output-google_bigquery
|
3
3
|
version: !ruby/object:Gem::Version
|
4
|
-
version: 3.2.3
|
4
|
+
version: 3.2.4
|
5
5
|
platform: ruby
|
6
6
|
authors:
|
7
7
|
- Elastic
|
8
8
|
autorequire:
|
9
9
|
bindir: bin
|
10
10
|
cert_chain: []
|
11
|
-
date:
|
11
|
+
date: 2018-04-06 00:00:00.000000000 Z
|
12
12
|
dependencies:
|
13
13
|
- !ruby/object:Gem::Dependency
|
14
14
|
requirement: !ruby/object:Gem::Requirement
|