logstash-output-google_bigquery 4.0.0-java

Files changed (53)
  1. checksums.yaml +7 -0
  2. data/CHANGELOG.md +71 -0
  3. data/CONTRIBUTORS +15 -0
  4. data/Gemfile +11 -0
  5. data/LICENSE +13 -0
  6. data/NOTICE.TXT +5 -0
  7. data/README.md +100 -0
  8. data/docs/index.asciidoc +348 -0
  9. data/lib/logstash-output-google_bigquery_jars.rb +38 -0
  10. data/lib/logstash/outputs/bigquery/batcher.rb +82 -0
  11. data/lib/logstash/outputs/bigquery/schema.rb +93 -0
  12. data/lib/logstash/outputs/bigquery/streamclient.rb +120 -0
  13. data/lib/logstash/outputs/google_bigquery.rb +280 -0
  14. data/logstash-output-google_bigquery.gemspec +31 -0
  15. data/spec/outputs/bigquery/batcher_spec.rb +110 -0
  16. data/spec/outputs/bigquery/schema_spec.rb +101 -0
  17. data/spec/outputs/google_bigquery_spec.rb +154 -0
  18. data/vendor/jar-dependencies/com/fasterxml/jackson/core/jackson-core/2.1.3/jackson-core-2.1.3.jar +0 -0
  19. data/vendor/jar-dependencies/com/google/api-client/google-api-client/1.23.0/google-api-client-1.23.0.jar +0 -0
  20. data/vendor/jar-dependencies/com/google/api/api-common/1.5.0/api-common-1.5.0.jar +0 -0
  21. data/vendor/jar-dependencies/com/google/api/gax-httpjson/0.40.0/gax-httpjson-0.40.0.jar +0 -0
  22. data/vendor/jar-dependencies/com/google/api/gax/1.23.0/gax-1.23.0.jar +0 -0
  23. data/vendor/jar-dependencies/com/google/api/grpc/proto-google-common-protos/1.7.0/proto-google-common-protos-1.7.0.jar +0 -0
  24. data/vendor/jar-dependencies/com/google/api/grpc/proto-google-iam-v1/0.8.0/proto-google-iam-v1-0.8.0.jar +0 -0
  25. data/vendor/jar-dependencies/com/google/apis/google-api-services-bigquery/v2-rev377-1.23.0/google-api-services-bigquery-v2-rev377-1.23.0.jar +0 -0
  26. data/vendor/jar-dependencies/com/google/auth/google-auth-library-credentials/0.9.0/google-auth-library-credentials-0.9.0.jar +0 -0
  27. data/vendor/jar-dependencies/com/google/auth/google-auth-library-oauth2-http/0.9.0/google-auth-library-oauth2-http-0.9.0.jar +0 -0
  28. data/vendor/jar-dependencies/com/google/auto/value/auto-value/1.4/auto-value-1.4.jar +0 -0
  29. data/vendor/jar-dependencies/com/google/cloud/google-cloud-bigquery/1.24.1/google-cloud-bigquery-1.24.1.jar +0 -0
  30. data/vendor/jar-dependencies/com/google/cloud/google-cloud-core-http/1.24.1/google-cloud-core-http-1.24.1.jar +0 -0
  31. data/vendor/jar-dependencies/com/google/cloud/google-cloud-core/1.24.1/google-cloud-core-1.24.1.jar +0 -0
  32. data/vendor/jar-dependencies/com/google/code/findbugs/jsr305/3.0.1/jsr305-3.0.1.jar +0 -0
  33. data/vendor/jar-dependencies/com/google/code/gson/gson/2.7/gson-2.7.jar +0 -0
  34. data/vendor/jar-dependencies/com/google/errorprone/error_prone_annotations/2.2.0/error_prone_annotations-2.2.0.jar +0 -0
  35. data/vendor/jar-dependencies/com/google/guava/guava/20.0/guava-20.0.jar +0 -0
  36. data/vendor/jar-dependencies/com/google/http-client/google-http-client-appengine/1.23.0/google-http-client-appengine-1.23.0.jar +0 -0
  37. data/vendor/jar-dependencies/com/google/http-client/google-http-client-jackson/1.23.0/google-http-client-jackson-1.23.0.jar +0 -0
  38. data/vendor/jar-dependencies/com/google/http-client/google-http-client-jackson2/1.23.0/google-http-client-jackson2-1.23.0.jar +0 -0
  39. data/vendor/jar-dependencies/com/google/http-client/google-http-client/1.23.0/google-http-client-1.23.0.jar +0 -0
  40. data/vendor/jar-dependencies/com/google/oauth-client/google-oauth-client/1.23.0/google-oauth-client-1.23.0.jar +0 -0
  41. data/vendor/jar-dependencies/com/google/protobuf/protobuf-java-util/3.5.1/protobuf-java-util-3.5.1.jar +0 -0
  42. data/vendor/jar-dependencies/com/google/protobuf/protobuf-java/3.5.1/protobuf-java-3.5.1.jar +0 -0
  43. data/vendor/jar-dependencies/commons-codec/commons-codec/1.9/commons-codec-1.9.jar +0 -0
  44. data/vendor/jar-dependencies/commons-logging/commons-logging/1.2/commons-logging-1.2.jar +0 -0
  45. data/vendor/jar-dependencies/io/grpc/grpc-context/1.9.0/grpc-context-1.9.0.jar +0 -0
  46. data/vendor/jar-dependencies/io/opencensus/opencensus-api/0.11.1/opencensus-api-0.11.1.jar +0 -0
  47. data/vendor/jar-dependencies/io/opencensus/opencensus-contrib-http-util/0.11.1/opencensus-contrib-http-util-0.11.1.jar +0 -0
  48. data/vendor/jar-dependencies/joda-time/joda-time/2.9.2/joda-time-2.9.2.jar +0 -0
  49. data/vendor/jar-dependencies/org/apache/httpcomponents/httpclient/4.5.2/httpclient-4.5.2.jar +0 -0
  50. data/vendor/jar-dependencies/org/apache/httpcomponents/httpcore/4.4.4/httpcore-4.4.4.jar +0 -0
  51. data/vendor/jar-dependencies/org/codehaus/jackson/jackson-core-asl/1.9.11/jackson-core-asl-1.9.11.jar +0 -0
  52. data/vendor/jar-dependencies/org/threeten/threetenbp/1.3.3/threetenbp-1.3.3.jar +0 -0
  53. metadata +178 -0
checksums.yaml ADDED
@@ -0,0 +1,7 @@
+ ---
+ SHA256:
+ metadata.gz: 96fffb2af160bfe6dc285eaa247f4b9eec66f135f36019bacb5f7fc014116f2f
+ data.tar.gz: 3d63d628effd3399b8c7d45f6b93b2bd0c4c31935e34346dda7309cbe2b1dfcd
+ SHA512:
+ metadata.gz: a274cf2d2b610451eeca3dbc78d23bf05c01f7de6fdedcc87c84e25805afef53cc840292757e172d9317565e8be2a04dd6e3d67870deec150900dffb27b460ed
+ data.tar.gz: c20e5be7ed18d9b34935eef10a444a692df62b0fc6343cde82db20ddb2a826acf99c92c961f056203f7395583c6597d720c9b5b53f46f789fc1737a9907e53eb
data/CHANGELOG.md ADDED
@@ -0,0 +1,71 @@
+ ## 4.0.0
+
+ **Breaking**: the update to 4.0.0 requires that you use an IAM JSON credentials file
+ rather than the deprecated P12 files.
+ Applications using Application Default Credentials (ADC) _will_ continue to work.
+
+ This plugin now uses the BigQuery Streaming API, which incurs a cost on upload.
+
+ - The advantages of the streaming API are:
+   - It allows real-time analysis and queries of incoming data.
+   - It allows Logstash instances to be started/stopped without worrying about failed batch jobs.
+   - The client library has better support and performance.
+ - New configuration options:
+   - `batch_size` - The number of messages to upload at once.
+   - `json_key_file` - The JSON IAM service account credentials to use with the plugin.
+   - `batch_size_bytes` - The maximum number of bytes to upload as part of a batch (approximate).
+ - Deprecated configurations:
+   - `uploader_interval_secs` - No longer used.
+   - `deleter_interval_secs` - No longer used.
+   - `temp_file_prefix` - No longer used.
+   - `temp_directory` - No longer used.
+   - `key_password` - Use `json_key_file` or Application Default Credentials (ADC) instead.
+   - `service_account` - Use `json_key_file` or Application Default Credentials (ADC) instead.
+ - Obsolete configurations:
+   - `key_path` - Use `json_key_file` or Application Default Credentials (ADC) instead.
+     See [the documentation](https://www.elastic.co/guide/en/logstash/current/plugins-outputs-google_bigquery.html#plugins-outputs-google_bigquery-key_path)
+     for help about moving to JSON key files; a minimal migration sketch follows below.
+
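+ For example, a minimal migration sketch (the file paths, project, and schema below are
+ illustrative placeholders, not defaults):
+
+ ```
+ output {
+   google_bigquery {
+     project_id      => "my-project-id"
+     dataset         => "logs"
+     csv_schema      => "path:STRING,status:INTEGER"
+     error_directory => "/tmp/bigquery-errors"
+     # before 4.0.0: key_path and service_account pointed at a P12 key
+     json_key_file   => "/path/to/key.json"   # omit to fall back to Application Default Credentials
+   }
+ }
+ ```
+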
+ ## 3.2.4
+ - Docs: Set the default_codec doc attribute.
+
+ ## 3.2.3
+ - Update gemspec summary
+
+ ## 3.2.2
+ - Fix some documentation issues
+
+ ## 3.2.0
+ - Add file recovery when the plugin crashes
+
+ ## 3.1.0
+ - Fix error checking in the plugin to properly handle failed inserts
+
+ ## 3.0.2
+ - Docs: Fix doc formatting
+
+ ## 3.0.1
+ - Pin versions of gems whose latest releases only work with Ruby 2.x
+
+ ## 3.0.0
+ - Breaking: Updated plugin to use new Java Event APIs
+ - Relax constraints on logstash-core-plugin-api
+ - Mark this plugin as concurrency :single
+ - Update .travis.yml
+
+ ## 2.0.5
+ - Depend on logstash-core-plugin-api instead of logstash-core, removing the need to mass update plugins on major releases of Logstash
+
+ ## 2.0.4
+ - New dependency requirements for logstash-core for the 5.0 release
+
+ ## 2.0.3
+ - Add support for specifying the schema as a hash
+ - Bubble up the error message that BigQuery returns on an error
+ - Add the table_separator option to the BigQuery output
+
+ ## 2.0.0
+ - Plugins were updated to follow the new shutdown semantics. This mainly allows Logstash to instruct input plugins to terminate gracefully,
+   instead of using Thread.raise on the plugins' threads. Ref: https://github.com/elastic/logstash/pull/3895
+ - Dependency on logstash-core updated to 2.0
+
data/CONTRIBUTORS ADDED
@@ -0,0 +1,15 @@
+ The following is a list of people who have contributed ideas, code, bug
+ reports, or in general have helped logstash along its way.
+
+ Contributors:
+ * Aaron Mildenstein (untergeek)
+ * Google LLC
+ * Joseph Lewis III (josephlewis42)
+ * Pier-Hugues Pellerin (ph)
+ * Richard Pijnenburg (electrical)
+ * Rodrigo De Castro (rdcastro)
+
+ Note: If you've sent us patches, bug reports, or otherwise contributed to
+ Logstash, and you aren't on the list above and want to be, please let us know
+ and we'll make sure you're here. Contributions from folks like you are what make
+ open source awesome.
data/Gemfile ADDED
@@ -0,0 +1,11 @@
+ source 'https://rubygems.org'
+
+ gemspec
+
+ logstash_path = ENV["LOGSTASH_PATH"] || "../../logstash"
+ use_logstash_source = ENV["LOGSTASH_SOURCE"] && ENV["LOGSTASH_SOURCE"].to_s == "1"
+
+ if Dir.exist?(logstash_path) && use_logstash_source
+   gem 'logstash-core', :path => "#{logstash_path}/logstash-core"
+   gem 'logstash-core-plugin-api', :path => "#{logstash_path}/logstash-core-plugin-api"
+ end
data/LICENSE ADDED
@@ -0,0 +1,13 @@
+ Copyright (c) 2012-2018 Elasticsearch <http://www.elastic.co>
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
data/NOTICE.TXT ADDED
@@ -0,0 +1,5 @@
+ Elasticsearch
+ Copyright 2012-2015 Elasticsearch
+
+ This product includes software developed by The Apache Software
+ Foundation (http://www.apache.org/).
data/README.md ADDED
@@ -0,0 +1,100 @@
+ # Logstash Plugin
+
+ [![Travis Build Status](https://travis-ci.org/logstash-plugins/logstash-output-google_bigquery.svg)](https://travis-ci.org/logstash-plugins/logstash-output-google_bigquery)
+
+ This is a plugin for [Logstash](https://github.com/elastic/logstash).
+
+ It is fully free and fully open source. The license is Apache 2.0, meaning you are free to use it however you want.
+
+ ## Documentation
+
+ Logstash provides infrastructure to automatically generate documentation for this plugin. We use the asciidoc format to write documentation, so any comments in the source code are first converted into asciidoc and then into HTML. All plugin documentation is placed in one [central location](http://www.elastic.co/guide/en/logstash/current/).
+
+ - For formatting code or config examples, you can use the asciidoc `[source,ruby]` directive.
+ - For more asciidoc formatting tips, see the reference at https://github.com/elastic/docs#asciidoc-guide
+
+ ## Need Help?
+
+ Need help? Try #logstash on freenode IRC or the https://discuss.elastic.co/c/logstash discussion forum.
+
+ ## Developing
+
+ ### 1. Plugin Development and Testing
+
+ #### Code
+ - To get started, you'll need JRuby with the Bundler gem installed.
+
+ - Create a new plugin or clone an existing one from the GitHub [logstash-plugins](https://github.com/logstash-plugins) organization. We also provide [example plugins](https://github.com/logstash-plugins?query=example).
+
+ - Install dependencies
+ ```sh
+ bundle install
+ bundle exec rake vendor
+ ```
+
+ #### Test
+
+ - Update your dependencies
+
+ ```sh
+ bundle install
+ bundle exec rake vendor
+ ```
+
+ - Run tests
+
+ ```sh
+ bundle exec rspec
+ ```
+
+ ### 2. Running your unpublished Plugin in Logstash
+
+ #### 2.1 Run in a local Logstash clone
+
+ - Edit Logstash `Gemfile` and add the local plugin path, for example:
+ ```ruby
+ gem "logstash-filter-awesome", :path => "/your/local/logstash-filter-awesome"
+ ```
+ - Install the plugin
+ ```sh
+ # Logstash 2.3 and higher
+ bin/logstash-plugin install --no-verify
+
+ # Prior to Logstash 2.3
+ bin/plugin install --no-verify
+
+ ```
+ - Run Logstash with your plugin
+ ```sh
+ bin/logstash -e 'filter {awesome {}}'
+ ```
+ At this point any modifications to the plugin code will be applied to this local Logstash setup. After modifying the plugin, simply rerun Logstash.
+
+ #### 2.2 Run in an installed Logstash
+
+ You can use the same **2.1** method to run your plugin in an installed Logstash by editing its `Gemfile` and pointing the `:path` to your local plugin development directory, or you can build the gem and install it using:
+
+ - Build your plugin gem
+ ```sh
+ gem build logstash-filter-awesome.gemspec
+ ```
+ - Install the plugin from the Logstash home
+ ```sh
+ # Logstash 2.3 and higher
+ bin/logstash-plugin install --no-verify
+
+ # Prior to Logstash 2.3
+ bin/plugin install --no-verify
+
+ ```
+ - Start Logstash and proceed to test the plugin
+
+ ## Contributing
+
+ All contributions are welcome: ideas, patches, documentation, bug reports, complaints, and even something you drew up on a napkin.
+
+ Programming is not a required skill. Whatever you've seen about open source and maintainers or community members saying "send patches or die" - you will not see that here.
+
+ It is more important to the community that you are able to contribute.
+
+ For more information about contributing, see the [CONTRIBUTING](https://github.com/elastic/logstash/blob/master/CONTRIBUTING.md) file.
data/docs/index.asciidoc ADDED
@@ -0,0 +1,348 @@
+ :plugin: google_bigquery
+ :type: output
+ :default_codec: plain
+
+ ///////////////////////////////////////////
+ START - GENERATED VARIABLES, DO NOT EDIT!
+ ///////////////////////////////////////////
+ :version: %VERSION%
+ :release_date: %RELEASE_DATE%
+ :changelog_url: %CHANGELOG_URL%
+ :include_path: ../../../../logstash/docs/include
+ ///////////////////////////////////////////
+ END - GENERATED VARIABLES, DO NOT EDIT!
+ ///////////////////////////////////////////
+
+ [id="plugins-{type}s-{plugin}"]
+
+ === Google BigQuery output plugin
+
+ include::{include_path}/plugin_header.asciidoc[]
+
+ ==== Description
+
+ ===== Summary
+
+ This plugin uploads events to Google BigQuery using the streaming API
+ so data can become available nearly immediately.
+
+ You can configure it to flush periodically, after N events, or after
+ a certain amount of data is ingested.
+
+ ===== Environment Configuration
+
+ You must enable BigQuery on your Google Cloud Platform (GCP) account and create a dataset to
+ hold the tables this plugin generates.
+
+ You must also grant the service account this plugin uses access to
+ the dataset.
+
+ You can use https://www.elastic.co/guide/en/logstash/current/event-dependent-configuration.html[Logstash conditionals]
+ and multiple configuration blocks to upload events with different structures; a sketch of this
+ follows the usage example below.
+
+ ===== Usage
+ This is an example of a Logstash config:
+
+ [source,ruby]
+ --------------------------
+ output {
+   google_bigquery {
+     project_id => "folkloric-guru-278"          (required)
+     dataset => "logs"                           (required)
+     csv_schema => "path:STRING,status:INTEGER,score:FLOAT"   (required) <1>
+     json_key_file => "/path/to/key.json"        (optional) <2>
+     error_directory => "/tmp/bigquery-errors"   (required)
+     date_pattern => "%Y-%m-%dT%H:00"            (optional)
+     flush_interval_secs => 30                   (optional)
+   }
+ }
+ --------------------------
+
+ <1> Specify either a csv_schema or a json_schema.
+
+ <2> If no key file is provided, the plugin tries to find
+ https://cloud.google.com/docs/authentication/production[Application Default Credentials]
+
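+ As noted above, you can combine conditionals with multiple `google_bigquery` blocks to route
+ differently-structured events to different datasets or schemas. A minimal sketch (the field
+ name, datasets, and schemas are illustrative assumptions):
+
+ [source,ruby]
+ --------------------------
+ output {
+   if [type] == "nginx-access" {
+     google_bigquery {
+       project_id => "folkloric-guru-278"
+       dataset => "web_logs"
+       csv_schema => "path:STRING,status:INTEGER,score:FLOAT"
+       error_directory => "/tmp/bigquery-errors"
+     }
+   } else {
+     google_bigquery {
+       project_id => "folkloric-guru-278"
+       dataset => "app_logs"
+       csv_schema => "message:STRING,level:STRING"
+       error_directory => "/tmp/bigquery-errors"
+     }
+   }
+ }
+ --------------------------
+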
+ ===== Considerations
+
+ * There is a small fee to insert data into BigQuery using the streaming API
+ * This plugin buffers events in-memory, so make sure the flush configurations are appropriate
+   for your use-case and consider using
+   https://www.elastic.co/guide/en/logstash/current/persistent-queues.html[Logstash Persistent Queues]
+
+ ===== Additional Resources
+
+ * https://cloud.google.com/docs/authentication/production[Application Default Credentials (ADC) Overview]
+ * https://cloud.google.com/bigquery/[BigQuery Introduction]
+ * https://cloud.google.com/bigquery/docs/schemas[BigQuery Schema Formats and Types]
+ * https://cloud.google.com/bigquery/pricing[Pricing Information]
+
+ [id="plugins-{type}s-{plugin}-options"]
+ ==== Google BigQuery Output Configuration Options
+
+ This plugin supports the following configuration options plus the <<plugins-{type}s-{plugin}-common-options>> described later.
+
+ [cols="<,<,<",options="header",]
+ |=======================================================================
+ |Setting |Input type|Required
+ | <<plugins-{type}s-{plugin}-batch_size>> |<<number,number>>|No
+ | <<plugins-{type}s-{plugin}-batch_size_bytes>> |<<number,number>>|No
+ | <<plugins-{type}s-{plugin}-csv_schema>> |<<string,string>>|No
+ | <<plugins-{type}s-{plugin}-dataset>> |<<string,string>>|Yes
+ | <<plugins-{type}s-{plugin}-date_pattern>> |<<string,string>>|No
+ | <<plugins-{type}s-{plugin}-deleter_interval_secs>> |<<number,number>>|__Deprecated__
+ | <<plugins-{type}s-{plugin}-error_directory>> |<<string,string>>|Yes
+ | <<plugins-{type}s-{plugin}-flush_interval_secs>> |<<number,number>>|No
+ | <<plugins-{type}s-{plugin}-ignore_unknown_values>> |<<boolean,boolean>>|No
+ | <<plugins-{type}s-{plugin}-json_key_file>> |<<string,string>>|No
+ | <<plugins-{type}s-{plugin}-json_schema>> |<<hash,hash>>|No
+ | <<plugins-{type}s-{plugin}-key_password>> |<<string,string>>|__Deprecated__
+ | <<plugins-{type}s-{plugin}-key_path>> |<<string,string>>|*Obsolete*
+ | <<plugins-{type}s-{plugin}-project_id>> |<<string,string>>|Yes
+ | <<plugins-{type}s-{plugin}-service_account>> |<<string,string>>|__Deprecated__
+ | <<plugins-{type}s-{plugin}-table_prefix>> |<<string,string>>|No
+ | <<plugins-{type}s-{plugin}-table_separator>> |<<string,string>>|No
+ | <<plugins-{type}s-{plugin}-temp_directory>> |<<string,string>>|__Deprecated__
+ | <<plugins-{type}s-{plugin}-temp_file_prefix>> |<<string,string>>|__Deprecated__
+ | <<plugins-{type}s-{plugin}-uploader_interval_secs>> |<<number,number>>|__Deprecated__
+ |=======================================================================
+
+ Also see <<plugins-{type}s-{plugin}-common-options>> for a list of options supported by all
+ output plugins.
+
+ &nbsp;
+
+ [id="plugins-{type}s-{plugin}-batch_size"]
+ ===== `batch_size`
+
+ * Value type is <<number,number>>
+ * Default value is `128`
+
+ The number of messages to upload at a single time. (< 1000, default: 128)
+
+ [id="plugins-{type}s-{plugin}-batch_size_bytes"]
+ ===== `batch_size_bytes`
+
+ * Value type is <<number,number>>
+ * Default value is `1_000_000`
+
+ An approximate number of bytes to upload as part of a batch. Default: 1MB
+
+ [id="plugins-{type}s-{plugin}-csv_schema"]
+ ===== `csv_schema`
+
+ * Value type is <<string,string>>
+ * Default value is `nil`
+
+ Schema for log data. It must follow the format `name1:type1(,name2:type2)*`.
+ For example, `path:STRING,status:INTEGER,score:FLOAT`.
+
+ [id="plugins-{type}s-{plugin}-dataset"]
+ ===== `dataset`
+
+ * This is a required setting.
+ * Value type is <<string,string>>
+ * There is no default value for this setting.
+
+ The BigQuery dataset that the tables for the events will be added to.
+
+ [id="plugins-{type}s-{plugin}-date_pattern"]
+ ===== `date_pattern`
+
+ * Value type is <<string,string>>
+ * Default value is `"%Y-%m-%dT%H:00"`
+
+ Time pattern for the BigQuery table; defaults to hourly tables.
+ Must be a Time.strftime pattern: www.ruby-doc.org/core-2.0/Time.html#method-i-strftime
+
+ [id="plugins-{type}s-{plugin}-deleter_interval_secs"]
+ ===== `deleter_interval_secs`
+
+ * Value type is <<number,number>>
+
+ **Deprecated:** this field is no longer used because temporary files are no longer
+ stored on the hard drive.
+
+ [id="plugins-{type}s-{plugin}-error_directory"]
+ ===== `error_directory`
+
+ * This is a required setting.
+ * Value type is <<string,string>>
+ * Default value is `"/tmp/bigquery"`.
+
+ The location to store events that could not be uploaded due to errors.
+ Consider using an additional Logstash input to pipe the contents of
+ these to an alert platform so you can manually fix the events.
+
+ Or use https://cloud.google.com/storage/docs/gcs-fuse[GCS FUSE] to
+ transparently upload to a GCS bucket.
+
+ File names follow the pattern `[table name]-[UNIX timestamp].log`.
+
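+ A minimal sketch of that idea, assuming the failed-event files live under
+ `/tmp/bigquery-errors` (the alerting output is a placeholder; use whatever fits your setup):
+
+ [source,ruby]
+ --------------------------
+ input {
+   file {
+     path => "/tmp/bigquery-errors/*.log"   # files written by this plugin on failed inserts
+     start_position => "beginning"
+   }
+ }
+ output {
+   # replace with your alerting destination (email, HTTP endpoint, etc.)
+   stdout { codec => rubydebug }
+ }
+ --------------------------
+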
+ [id="plugins-{type}s-{plugin}-flush_interval_secs"]
+ ===== `flush_interval_secs`
+
+ * Value type is <<number,number>>
+ * Default value is `5`
+
+ Uploads all data this often, even if other upload criteria aren't met. Default: 5s
+
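+ For example, a sketch combining the three flush-related settings (the thresholds are
+ illustrative, not recommendations): a batch is sent once 250 events or roughly 5 MB have
+ been buffered, and at least every 10 seconds regardless.
+
+ [source,ruby]
+ --------------------------
+ output {
+   google_bigquery {
+     # required options (project_id, dataset, schema, error_directory) omitted for brevity
+     batch_size => 250             # flush after 250 buffered events...
+     batch_size_bytes => 5000000   # ...or after roughly 5 MB...
+     flush_interval_secs => 10     # ...or after 10 seconds at the latest
+   }
+ }
+ --------------------------
+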
+ [id="plugins-{type}s-{plugin}-ignore_unknown_values"]
+ ===== `ignore_unknown_values`
+
+ * Value type is <<boolean,boolean>>
+ * Default value is `false`
+
+ Indicates if BigQuery should ignore values that are not represented in the table schema.
+ If true, the extra values are discarded.
+ If false, BigQuery will reject the records with extra fields and the job will fail.
+ The default value is false.
+
+ NOTE: You may want to add a Logstash filter like the following to remove the common fields Logstash adds:
+
+ [source,ruby]
+ ----------------------------------
+ mutate {
+   remove_field => ["@version","@timestamp","path","host","type", "message"]
+ }
+ ----------------------------------
+
+ [id="plugins-{type}s-{plugin}-json_key_file"]
+ ===== `json_key_file`
+
+ * Value type is <<string,string>>
+ * Default value is `nil`
+
+ If Logstash is running within Google Compute Engine, the plugin can use
+ GCE's Application Default Credentials. Outside of GCE, you will need to
+ specify a Service Account JSON key file.
+
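+ A minimal sketch, assuming a service account key stored at `/etc/logstash/bq-key.json`
+ (the path is a placeholder); on GCE you can simply omit the setting and rely on ADC:
+
+ [source,ruby]
+ --------------------------
+ output {
+   google_bigquery {
+     # required options (project_id, dataset, schema, error_directory) omitted for brevity
+     json_key_file => "/etc/logstash/bq-key.json"
+   }
+ }
+ --------------------------
+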
+ [id="plugins-{type}s-{plugin}-json_schema"]
+ ===== `json_schema`
+
+ * Value type is <<hash,hash>>
+ * Default value is `nil`
+
+ Schema for log data, as a hash. It can include nested records, descriptions, and modes.
+
+ Example:
+ [source,ruby]
+ --------------------------
+ json_schema => {
+   fields => [{
+     name => "endpoint"
+     type => "STRING"
+     description => "Request route"
+   }, {
+     name => "status"
+     type => "INTEGER"
+     mode => "NULLABLE"
+   }, {
+     name => "params"
+     type => "RECORD"
+     mode => "REPEATED"
+     fields => [{
+       name => "key"
+       type => "STRING"
+     }, {
+       name => "value"
+       type => "STRING"
+     }]
+   }]
+ }
+ --------------------------
+
+ [id="plugins-{type}s-{plugin}-key_password"]
+ ===== `key_password`
+
+ * Value type is <<string,string>>
+
+ **Deprecated:** this field is no longer needed with `json_key_file` or ADC.
+
+
+ [id="plugins-{type}s-{plugin}-key_path"]
+ ===== `key_path`
+
+ * Value type is <<string,string>>
+
+ **Obsolete:** The PKCS12 key file format is no longer supported.
+ Please use one of the following mechanisms:
+
+ * https://cloud.google.com/docs/authentication/production[Application Default Credentials (ADC)],
+   configured via environment variables on Compute Engine, Kubernetes Engine, App Engine, or
+   Cloud Functions.
+ * A JSON authentication key file. You can generate one in the console for the service account,
+   as you did with the `.P12` file, or with the following command:
+   `gcloud iam service-accounts keys create key.json --iam-account my-sa-123@my-project-123.iam.gserviceaccount.com`
+
+ [id="plugins-{type}s-{plugin}-project_id"]
+ ===== `project_id`
+
+ * This is a required setting.
+ * Value type is <<string,string>>
+ * There is no default value for this setting.
+
+ Google Cloud Project ID (number, not Project Name!).
+
+ [id="plugins-{type}s-{plugin}-service_account"]
+ ===== `service_account`
+
+ * Value type is <<string,string>>
+
+ **Deprecated:** this field is no longer used because the service account is contained in the
+ `json_key_file` or in the Application Default Credentials (ADC) environment.
+
+ [id="plugins-{type}s-{plugin}-table_prefix"]
+ ===== `table_prefix`
+
+ * Value type is <<string,string>>
+ * Default value is `"logstash"`
+
+ BigQuery table ID prefix to be used when creating new tables for log data.
+ The table name will be `<table_prefix><table_separator><date>`.
+
+ [id="plugins-{type}s-{plugin}-table_separator"]
+ ===== `table_separator`
+
+ * Value type is <<string,string>>
+ * Default value is `"_"`
+
+ BigQuery table separator to be added between the table_prefix and the
+ date suffix.
+
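+ For example (values are illustrative), combining the documented template
+ `<table_prefix><table_separator><date>` with a daily `date_pattern` yields table IDs such as
+ `weblogs_2018_06_01`:
+
+ [source,ruby]
+ --------------------------
+ output {
+   google_bigquery {
+     # required options (project_id, dataset, schema, error_directory) omitted for brevity
+     table_prefix => "weblogs"
+     table_separator => "_"       # the default
+     date_pattern => "%Y_%m_%d"   # daily tables: weblogs_2018_06_01, weblogs_2018_06_02, ...
+   }
+ }
+ --------------------------
+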
+ [id="plugins-{type}s-{plugin}-temp_directory"]
+ ===== `temp_directory`
+
+ * Value type is <<string,string>>
+
+ **Deprecated:** this field is no longer used.
+ Events are uploaded in real-time without being stored to disk.
+
+ * Events that failed to be uploaded will be stored in <<plugins-{type}s-{plugin}-error_directory>>.
+ * There is a small fee to insert data into BigQuery using the streaming API.
+   https://cloud.google.com/bigquery/pricing[Pricing Information]
+ * This plugin buffers events in-memory, so make sure the flush configurations are appropriate
+   for your use-case and consider using
+   https://www.elastic.co/guide/en/logstash/current/persistent-queues.html[Logstash Persistent Queues]
+
+ [id="plugins-{type}s-{plugin}-temp_file_prefix"]
+ ===== `temp_file_prefix`
+
+ * Value type is <<string,string>>
+
+ **Deprecated:** this field is no longer used.
+ Events are uploaded in real-time without being stored to disk.
+
+ * Events that failed to be uploaded will be stored in <<plugins-{type}s-{plugin}-error_directory>>.
+ * There is a small fee to insert data into BigQuery using the streaming API.
+   https://cloud.google.com/bigquery/pricing[Pricing Information]
+ * This plugin buffers events in-memory, so make sure the flush configurations are appropriate
+   for your use-case and consider using
+   https://www.elastic.co/guide/en/logstash/current/persistent-queues.html[Logstash Persistent Queues]
+
+
+ [id="plugins-{type}s-{plugin}-common-options"]
+ include::{include_path}/{type}.asciidoc[]
+
+ :default_codec!: