logstash-output-google_bigquery 2.0.2 → 2.0.3
- checksums.yaml +4 -4
- data/CHANGELOG.md +5 -0
- data/README.md +3 -0
- data/lib/logstash/outputs/google_bigquery.rb +37 -15
- data/logstash-output-google_bigquery.gemspec +1 -1
- metadata +3 -3
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 872d8260b8414aac2be0a457bd42ce98392373bc
+  data.tar.gz: 6bca379ed332084d90d358c54b50bca7b1939419
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 175bea2712feac1c5eede53a9f70e29f46abcbb176ff137d39af8cfcaa6037ae89b7157848dc261e8ba18a2bca514488f61f76b1c9dae4a6a04145f1dee92000
+  data.tar.gz: 20bf02602e61780f7446a4cfca7f1098555c9fac39a13284d7b62f279c9971b2fa225971b2366b036ad444a4cc5488479d04f960b935a52b87c3b172c02c9b66
data/CHANGELOG.md
CHANGED
@@ -1,3 +1,8 @@
+## 2.0.3
+ - Add support for specifying schema as a hash
+ - Bubble up error message that BQ returns on an error
+ - Add the table_separator option on bigquery output
+
 ## 2.0.0
  - Plugins were updated to follow the new shutdown semantic, this mainly allows Logstash to instruct input plugins to terminate gracefully,
   instead of using Thread.raise on the plugins' threads. Ref: https://github.com/elastic/logstash/pull/3895
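The main user-visible change in 2.0.3 is the schema configuration. Below is a hedged usage sketch assembled from the example values in the plugin's own documentation comments (project, dataset, key and schema fields are those shown further down in this diff); it uses the new `json_schema` hash form instead of `csv_schema`. The `table_separator` option is mentioned in the changelog but its signature is not shown in the excerpted diff, so it is omitted here.

```
output {
  google_bigquery {
    project_id => "folkloric-guru-278"
    dataset => "logs"
    # New in 2.0.3: supply the schema as a hash instead of csv_schema
    json_schema => {
      fields => [{
        name => "timestamp"
        type => "TIMESTAMP"
      }, {
        name => "host"
        type => "STRING"
      }, {
        name => "message"
        type => "STRING"
      }]
    }
    key_path => "/path/to/privatekey.p12"
    key_password => "notasecret"
    service_account => "1234@developer.gserviceaccount.com"
  }
}
```

Either `csv_schema` or `json_schema` must be provided; the `register` check shown further down in this diff raises if both are nil.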
data/README.md
CHANGED
@@ -1,5 +1,8 @@
 # Logstash Plugin
 
+[![Build
+Status](http://build-eu-00.elastic.co/view/LS%20Plugins/view/LS%20Outputs/job/logstash-plugin-output-google_bigquery-unit/badge/icon)](http://build-eu-00.elastic.co/view/LS%20Plugins/view/LS%20Outputs/job/logstash-plugin-output-google_bigquery-unit/)
+
 This is a plugin for [Logstash](https://github.com/elastic/logstash).
 
 It is fully free and fully open source. The license is Apache 2.0, meaning you are pretty much free to use it however you want in whatever way.
data/lib/logstash/outputs/google_bigquery.rb
CHANGED
@@ -66,7 +66,7 @@ require "logstash/json"
 # google_bigquery {
 #   project_id => "folkloric-guru-278" (required)
 #   dataset => "logs" (required)
-#   csv_schema => "path:STRING,status:INTEGER,score:FLOAT" (required)
+#   csv_schema => "path:STRING,status:INTEGER,score:FLOAT" (required*)
 #   key_path => "/path/to/privatekey.p12" (required)
 #   key_password => "notasecret" (optional)
 #   service_account => "1234@developer.gserviceaccount.com" (required)
@@ -79,6 +79,8 @@ require "logstash/json"
 #   }
 # }
 #
+# * Specify either a csv_schema or a json_schema.
+#
 # Improvements TODO list:
 # - Refactor common code between Google BQ and GCS plugins.
 # - Turn Google API code into a Plugin Mixin (like AwsConfig).
@@ -100,7 +102,22 @@ class LogStash::Outputs::GoogleBigQuery < LogStash::Outputs::Base
   # Schema for log data. It must follow this format:
   # <field1-name>:<field1-type>,<field2-name>:<field2-type>,...
   # Example: path:STRING,status:INTEGER,score:FLOAT
-  config :csv_schema, :validate => :string, :required => true
+  config :csv_schema, :validate => :string, :required => false, :default => nil
+
+  # Schema for log data, as a hash. Example:
+  # json_schema => {
+  #   fields => [{
+  #     name => "timestamp"
+  #     type => "TIMESTAMP"
+  #   }, {
+  #     name => "host"
+  #     type => "STRING"
+  #   }, {
+  #     name => "message"
+  #     type => "STRING"
+  #   }]
+  # }
+  config :json_schema, :validate => :hash, :required => false, :default => nil
 
   # Indicates if BigQuery should allow extra values that are not represented in the table schema.
   # If true, the extra values are ignored. If false, records with extra columns are treated as bad records, and if there are too many bad records, an invalid error is returned in the job result. The default value is false.
@@ -148,25 +165,30 @@ class LogStash::Outputs::GoogleBigQuery < LogStash::Outputs::Base
 
     @logger.debug("BQ: register plugin")
 
-    @fields = Array.new
+    if !@csv_schema.nil?
+      @fields = Array.new
+
+      CSV.parse(@csv_schema.gsub('\"', '""')).flatten.each do |field|
+        temp = field.strip.split(":")
 
-    CSV.parse(@csv_schema.gsub('\"', '""')).flatten.each do |field|
-      temp = field.strip.split(":")
+        # Check that the field in the schema follows the format (<name>:<value>)
+        if temp.length != 2
+          raise "BigQuery schema must follow the format <field-name>:<field-value>"
+        end
 
-      # Check that the field in the schema follows the format (<name>:<value>)
-      if temp.length != 2
-        raise "BigQuery schema must follow the format <field-name>:<field-value>"
+        @fields << { "name" => temp[0], "type" => temp[1] }
       end
 
-      @fields << { "name" => temp[0], "type" => temp[1] }
-    end
+      # Check that we have at least one field in the schema
+      if @fields.length == 0
+        raise "BigQuery schema must contain at least one field"
+      end
 
-    # Check that we have at least one field in the schema
-    if @fields.length == 0
-      raise "BigQuery schema must contain at least one field"
+      @json_schema = { "fields" => @fields }
+    end
+    if @json_schema.nil?
+      raise "Configuration must provide either json_schema or csv_schema."
     end
-
-    @json_schema = { "fields" => @fields }
 
     @upload_queue = Queue.new
     @delete_queue = Queue.new
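For reference, here is a minimal standalone sketch of the schema handling this hunk introduces: a `csv_schema` string is parsed into the same `{ "fields" => [...] }` hash that `json_schema` now accepts directly, and the new nil check fires when neither option yields a schema. The helper name `build_json_schema` is illustrative only and not part of the plugin's API.

```ruby
require 'csv'

# Illustrative helper (not part of the plugin API): reproduces the schema logic
# added to register in 2.0.3 so it can be exercised outside Logstash.
def build_json_schema(csv_schema: nil, json_schema: nil)
  unless csv_schema.nil?
    fields = []
    CSV.parse(csv_schema.gsub('\"', '""')).flatten.each do |field|
      temp = field.strip.split(":")
      # Each entry must look like <field-name>:<field-type>
      raise "BigQuery schema must follow the format <field-name>:<field-value>" if temp.length != 2
      fields << { "name" => temp[0], "type" => temp[1] }
    end
    raise "BigQuery schema must contain at least one field" if fields.empty?
    json_schema = { "fields" => fields }
  end
  # 2.0.3 behaviour: one of the two options has to produce a schema
  raise "Configuration must provide either json_schema or csv_schema." if json_schema.nil?
  json_schema
end

# The CSV form and the hash form end up as the same structure:
p build_json_schema(csv_schema: "path:STRING,status:INTEGER,score:FLOAT")
# => {"fields"=>[{"name"=>"path", "type"=>"STRING"}, ...]}
```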
data/logstash-output-google_bigquery.gemspec
CHANGED
@@ -1,7 +1,7 @@
 Gem::Specification.new do |s|
 
   s.name = 'logstash-output-google_bigquery'
-  s.version = '2.0.2'
+  s.version = '2.0.3'
   s.licenses = ['Apache License (2.0)']
   s.summary = "Plugin to upload log events to Google BigQuery (BQ)"
   s.description = "This gem is a logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/plugin install gemname. This gem is not a stand-alone program"
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: logstash-output-google_bigquery
 version: !ruby/object:Gem::Version
-  version: 2.0.2
+  version: 2.0.3
 platform: ruby
 authors:
 - Elastic
 autorequire:
 bindir: bin
 cert_chain: []
-date:
+date: 2016-01-04 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   requirement: !ruby/object:Gem::Requirement
@@ -95,7 +95,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
     version: '0'
 requirements: []
 rubyforge_project:
-rubygems_version: 2.4.
+rubygems_version: 2.4.5
 signing_key:
 specification_version: 4
 summary: Plugin to upload log events to Google BigQuery (BQ)