logstash-output-google_bigquery 4.1.0-java → 4.1.5-java

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 31a414a63c70fc7b35c690e076db1d1595785c79187a94e361b50d8632161496
- data.tar.gz: 70622ec5c509cd8ac319e4e021b04086a841952b51619dd99fdaa4e2ac21c0a6
+ metadata.gz: e9a03bff39381bcad84b1c64db1d06cbe8f213aed0368e1badcd450315a3e92c
+ data.tar.gz: e344def3bbaed14a3fa75dd386868647d658adad6b89619d149ca8fdc5dceae6
  SHA512:
- metadata.gz: b0157e8e33e82c5532bf13cb758ef74d55b05c5ee32185ba53ae0f3dfefbfbc34bcd0df8eac2d1c024b01be93c034ad923e15741df31cb37a84746e944ffe69f
- data.tar.gz: 31a7c30f4a4bd0d5b95cfec1f4e569961263527a81f8d92b97c0d9ec04dcff8b73c4f5fe4c118cc307543a12958d0986a33ea35e9e45f3c89e7363dcf8a501d5
+ metadata.gz: 56b85e87ff9547aa4b1ed46fbc9cc8b5894d9e92d4d071cb89b009a0e4f773be3508f0a7ac9b5e989473a7e4323b510ff9cd25e1329188408d6972b62ecfe5ac
+ data.tar.gz: 22dd3f9644a96e5bc3ffd9dac50b008b2ef8568a5499e0c505016ee2b3dec66a8d5b644276af04be757e81487e46d9b937db180cd716964093408827c7ce2762
CHANGELOG.md CHANGED
@@ -1,3 +1,18 @@
+ ## 4.1.5
+ - [DOC] Updated links to use shared attributes [#61](https://github.com/logstash-plugins/logstash-output-google_bigquery/pull/61)
+
+ ## 4.1.4
+ - Changed concurrency to :shared and publish outside of synchronized code [#60](https://github.com/logstash-plugins/logstash-output-google_bigquery/pull/60)
+
+ ## 4.1.3
+ - Fixed documentation issue where malformed asciidoc caused text to be lost [#53](https://github.com/logstash-plugins/logstash-output-google_bigquery/pull/53)
+
+ ## 4.1.2
+ - Fixed issue where Logstash shutdown could cause data loss due to not flushing buffers on close [#52](https://github.com/logstash-plugins/logstash-output-google_bigquery/pull/52)
+
+ ## 4.1.1
+ - Fixed inaccuracies in documentation [#46](https://github.com/logstash-plugins/logstash-output-google_bigquery/pull/46)
+
  ## 4.1.0
  - Added `skip_invalid_rows` configuration which will insert all valid rows of a BigQuery insert
  and skip any invalid ones.
data/LICENSE CHANGED
@@ -1,13 +1,202 @@
- Copyright (c) 2012-2018 Elasticsearch <http://www.elastic.co>
 
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
 
- http://www.apache.org/licenses/LICENSE-2.0
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
 
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright 2020 Elastic and contributors
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
data/README.md CHANGED
@@ -1,6 +1,6 @@
  # Logstash Plugin
 
- [![Travis Build Status](https://travis-ci.org/logstash-plugins/logstash-output-google_bigquery.svg)](https://travis-ci.org/logstash-plugins/logstash-output-google_bigquery)
+ [![Travis Build Status](https://travis-ci.com/logstash-plugins/logstash-output-google_bigquery.svg)](https://travis-ci.com/logstash-plugins/logstash-output-google_bigquery)
 
  This is a plugin for [Logstash](https://github.com/elastic/logstash).
 
data/docs/index.asciidoc CHANGED
@@ -23,25 +23,24 @@ include::{include_path}/plugin_header.asciidoc[]
 
  ===== Summary
 
- This plugin uploads events to Google BigQuery using the streaming API
- so data can become available nearly immediately.
+ This Logstash plugin uploads events to Google BigQuery using the streaming API
+ so data can become available to query nearly immediately.
 
  You can configure it to flush periodically, after N events or after
  a certain amount of data is ingested.
 
  ===== Environment Configuration
 
- You must enable BigQuery on your Google Cloud Storage (GCS) account and create a dataset to
+ You must enable BigQuery on your Google Cloud account and create a dataset to
  hold the tables this plugin generates.
 
- You must also grant the service account this plugin uses access to
- the dataset.
+ You must also grant the service account this plugin uses access to the dataset.
 
- You can use https://www.elastic.co/guide/en/logstash/current/event-dependent-configuration.html[Logstash conditionals]
+ You can use {logstash-ref}/event-dependent-configuration.html[Logstash conditionals]
  and multiple configuration blocks to upload events with different structures.
 
  ===== Usage
- This is an example of logstash config:
+ This is an example of Logstash config:
 
  [source,ruby]
  --------------------------
@@ -59,21 +58,23 @@ output {
  --------------------------
 
  <1> Specify either a csv_schema or a json_schema.
-
  <2> If the key is not used, then the plugin tries to find
  https://cloud.google.com/docs/authentication/production[Application Default Credentials]
 
  ===== Considerations
 
- * There is a small fee to insert data into BigQuery using the streaming API
+ * There is a small fee to insert data into BigQuery using the streaming API.
  * This plugin buffers events in-memory, so make sure the flush configurations are appropriate
  for your use-case and consider using
- https://www.elastic.co/guide/en/logstash/current/persistent-queues.html[Logstash Persistent Queues]
+ {logstash-ref}/persistent-queues.html[Logstash Persistent Queues].
+ * Events will be flushed when <<plugins-{type}s-{plugin}-batch_size>>, <<plugins-{type}s-{plugin}-batch_size_bytes>>, or <<plugins-{type}s-{plugin}-flush_interval_secs>> is met, whichever comes first.
+ If you notice a delay in your processing or low throughput, try adjusting those settings.
 
  ===== Additional Resources
 
  * https://cloud.google.com/docs/authentication/production[Application Default Credentials (ADC) Overview]
  * https://cloud.google.com/bigquery/[BigQuery Introduction]
+ * https://cloud.google.com/bigquery/quotas[BigQuery Quotas and Limits]
  * https://cloud.google.com/bigquery/docs/schemas[BigQuery Schema Formats and Types]
 
  [id="plugins-{type}s-{plugin}-options"]
@@ -120,7 +121,12 @@ added[4.0.0]
 
  * Value type is <<number,number>>
  * Default value is `128`
 
- The number of messages to upload at a single time. (< 1000, default: 128)
+ The maximum number of messages to upload at a single time.
+ This number must be < 10,000.
+ Batching can increase performance and throughput to a point, but at the cost of per-request latency.
+ Too few rows per request and the overhead of each request can make ingestion inefficient.
+ Too many rows per request and the throughput may drop.
+ BigQuery recommends using about 500 rows per request, but experimentation with representative data (schema and data sizes) will help you determine the ideal batch size.
 
  [id="plugins-{type}s-{plugin}-batch_size_bytes"]
  ===== `batch_size_bytes`
@@ -130,10 +136,11 @@ added[4.0.0]
 
  * Value type is <<number,number>>
  * Default value is `1_000_000`
 
- An approximate number of bytes to upload as part of a batch. Default: 1MB
+ An approximate number of bytes to upload as part of a batch.
+ This number should be < 10MB or inserts may fail.
 
  [id="plugins-{type}s-{plugin}-csv_schema"]
- ===== `csv_schema`
+ ===== `csv_schema`
 
  * Value type is <<string,string>>
  * Default value is `nil`
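
As a rough illustration of the batch-tuning guidance above, the relevant settings might be combined as follows; the values are illustrative starting points for experimentation, not recommendations for every workload:

[source,ruby]
--------------------------
output {
  google_bigquery {
    # project_id, dataset, schema, and credentials as shown earlier
    batch_size => 500             # near the ~500 rows per request BigQuery suggests
    batch_size_bytes => 1000000   # roughly 1 MB per request, well under the 10 MB limit
    flush_interval_secs => 5      # upload at least this often even if the batch is not full
  }
}
--------------------------
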
@@ -142,7 +149,7 @@ Schema for log data. It must follow the format `name1:type1(,name2:type2)*`.
  For example, `path:STRING,status:INTEGER,score:FLOAT`.
 
  [id="plugins-{type}s-{plugin}-dataset"]
- ===== `dataset`
+ ===== `dataset`
 
  * This is a required setting.
  * Value type is <<string,string>>
@@ -151,7 +158,7 @@ For example, `path:STRING,status:INTEGER,score:FLOAT`.
  The BigQuery dataset the tables for the events will be added to.
 
  [id="plugins-{type}s-{plugin}-date_pattern"]
- ===== `date_pattern`
+ ===== `date_pattern`
 
  * Value type is <<string,string>>
  * Default value is `"%Y-%m-%dT%H:00"`
@@ -187,15 +194,16 @@ transparently upload to a GCS bucket.
  File names follow the pattern `[table name]-[UNIX timestamp].log`
 
  [id="plugins-{type}s-{plugin}-flush_interval_secs"]
- ===== `flush_interval_secs`
+ ===== `flush_interval_secs`
 
  * Value type is <<number,number>>
  * Default value is `5`
 
- Uploads all data this often even if other upload criteria aren't met. Default: 5s
+ Uploads all data this often even if other upload criteria aren't met.
+
 
  [id="plugins-{type}s-{plugin}-ignore_unknown_values"]
- ===== `ignore_unknown_values`
+ ===== `ignore_unknown_values`
 
  * Value type is <<boolean,boolean>>
  * Default value is `false`
@@ -217,17 +225,17 @@ mutate {
  [id="plugins-{type}s-{plugin}-json_key_file"]
  ===== `json_key_file`
 
- added[4.0.0, Replaces <<plugins-{type}s-{plugin}-key_password>>, <<plugins-{type}s-{plugin}-key_path>> and <<plugins-{type}s-{plugin}-service_account>>]
+ added[4.0.0, "Replaces <<plugins-{type}s-{plugin}-key_password>>, <<plugins-{type}s-{plugin}-key_path>> and <<plugins-{type}s-{plugin}-service_account>>."]
 
  * Value type is <<string,string>>
  * Default value is `nil`
 
- If logstash is running within Google Compute Engine, the plugin can use
+ If Logstash is running within Google Compute Engine, the plugin can use
  GCE's Application Default Credentials. Outside of GCE, you will need to
  specify a Service Account JSON key file.
 
  [id="plugins-{type}s-{plugin}-json_schema"]
- ===== `json_schema`
+ ===== `json_schema`
 
  * Value type is <<hash,hash>>
  * Default value is `nil`
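
The full `json_schema` example is elided between the hunks of this diff. As an illustrative sketch only (the field names here are hypothetical), a nested schema is expressed as a hash with a `fields` array whose entries follow BigQuery's field definitions (`name`, `type`, and optionally `mode` or `description`):

[source,ruby]
--------------------------
json_schema => {
  fields => [{
    name => "timestamp"
    type => "TIMESTAMP"
  }, {
    name => "host"
    type => "STRING"
  }]
}
--------------------------
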
@@ -287,7 +295,7 @@ Please use one of the following mechanisms:
  `gcloud iam service-accounts keys create key.json --iam-account my-sa-123@my-project-123.iam.gserviceaccount.com`
 
  [id="plugins-{type}s-{plugin}-project_id"]
- ===== `project_id`
+ ===== `project_id`
 
  * This is a required setting.
  * Value type is <<string,string>>
@@ -314,7 +322,7 @@ Insert all valid rows of a request, even if invalid rows exist.
  The default value is false, which causes the entire request to fail if any invalid rows exist.
 
  [id="plugins-{type}s-{plugin}-table_prefix"]
- ===== `table_prefix`
+ ===== `table_prefix`
 
  * Value type is <<string,string>>
  * Default value is `"logstash"`
@@ -323,7 +331,7 @@ BigQuery table ID prefix to be used when creating new tables for log data.
  Table name will be `<table_prefix><table_separator><date>`
 
  [id="plugins-{type}s-{plugin}-table_separator"]
- ===== `table_separator`
+ ===== `table_separator`
 
  * Value type is <<string,string>>
  * Default value is `"_"`
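
To make the table-naming settings concrete, the sketch below spells out the defaults; the resulting table ID is simply the concatenation `<table_prefix><table_separator><date>`, so with these values a new table is created per hour of ingested data:

[source,ruby]
--------------------------
output {
  google_bigquery {
    # other required settings omitted
    table_prefix => "logstash"        # default prefix
    table_separator => "_"            # default separator
    date_pattern => "%Y-%m-%dT%H:00"  # default date format, one table per hour
  }
}
--------------------------
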
@@ -361,4 +369,4 @@ around one hour).
  [id="plugins-{type}s-{plugin}-common-options"]
  include::{include_path}/{type}.asciidoc[]
 
- :default_codec!:
+ :default_codec!:
data/lib/logstash/outputs/bigquery/batcher.rb CHANGED
@@ -35,6 +35,7 @@ module LogStash
  def enqueue(message)
  @lock.write_lock.lock
 
+ orig = nil
  begin
  is_flush_request = message.nil?
 
@@ -49,15 +50,14 @@ module LogStash
  if is_flush_request || length_met || size_met
  orig = @batch
  clear
-
- yield(orig) if block_given?
- return orig
  end
 
- nil
  ensure
  @lock.write_lock.unlock
  end
+
+ yield(orig) if block_given? && !orig.nil?
+ return orig
  end
 
  # removes all elements from the batch
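
For readers following this change, here is a standalone, plain-Ruby sketch of the pattern the new code adopts: complete and swap out the batch while holding the lock, but publish only after the lock has been released. The class below is illustrative, not the plugin's actual Batcher (which runs under JRuby and guards the batch with a java.util.concurrent read/write lock):

[source,ruby]
--------------------------
# Minimal analogue of "collect under the lock, publish outside of it".
class TinyBatcher
  def initialize(max_length)
    @max_length = max_length
    @batch = []
    @lock = Mutex.new
  end

  # A nil message is treated as a flush request, mirroring the plugin's batcher.
  def enqueue(message)
    completed = nil
    @lock.synchronize do
      @batch << message unless message.nil?
      if message.nil? || @batch.length >= @max_length
        completed = @batch   # hand the finished batch out of the critical section
        @batch = []
      end
    end
    # The potentially slow publish happens here, after the lock is released.
    yield(completed) if block_given? && !completed.nil? && !completed.empty?
    completed
  end
end

batcher = TinyBatcher.new(3)
1.upto(7) { |i| batcher.enqueue("event #{i}") { |batch| puts "publish #{batch.inspect}" } }
batcher.enqueue(nil) { |batch| puts "flush #{batch.inspect}" }
--------------------------
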
data/lib/logstash/outputs/google_bigquery.rb CHANGED
@@ -7,6 +7,7 @@ require 'logstash/outputs/bigquery/schema'
 
  require 'time'
  require 'fileutils'
+ require 'concurrent'
 
  #
  # === Summary
@@ -67,7 +68,7 @@ require 'fileutils'
  class LogStash::Outputs::GoogleBigQuery < LogStash::Outputs::Base
  config_name 'google_bigquery'
 
- concurrency :single
+ concurrency :shared
 
  # Google Cloud Project ID (number, not Project Name!).
  config :project_id, validate: :string, required: true
@@ -181,6 +182,7 @@ class LogStash::Outputs::GoogleBigQuery < LogStash::Outputs::Base
  @schema = LogStash::Outputs::BigQuery::Schema.parse_csv_or_json @csv_schema, @json_schema
  @bq_client = LogStash::Outputs::BigQuery::StreamingClient.new @json_key_file, @project_id, @logger
  @batcher = LogStash::Outputs::BigQuery::Batcher.new @batch_size, @batch_size_bytes
+ @stopping = Concurrent::AtomicBoolean.new(false)
 
  init_batcher_flush_thread
  end
@@ -274,11 +276,24 @@ class LogStash::Outputs::GoogleBigQuery < LogStash::Outputs::Base
 
  def init_batcher_flush_thread
  @flush_thread = Thread.new do
- loop do
- sleep @flush_interval_secs
+ until stopping?
+ Stud.stoppable_sleep(@flush_interval_secs) { stopping? }
 
  @batcher.enqueue(nil) { |batch| publish(batch) }
  end
  end
  end
+
+ def stopping?
+ @stopping.value
+ end
+
+ def close
+ @stopping.make_true
+ @flush_thread.wakeup
+ @flush_thread.join
+ # Final flush to publish any events enqueued if a pipeline receives a shutdown signal after the flush
+ # thread has begun flushing.
+ @batcher.enqueue(nil) { |batch| publish(batch) }
+ end
  end
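
The shutdown change above follows a common pattern: an atomic stop flag, an interruptible wait, and one final flush in `close` so nothing enqueued after the last tick is lost. Below is a self-contained, hedged sketch of that pattern; it assumes only the concurrent-ruby gem, whereas the plugin itself uses `Stud.stoppable_sleep` for the wait:

[source,ruby]
--------------------------
require 'concurrent'

class PeriodicFlusher
  def initialize(interval_secs, &flush)
    @interval = interval_secs
    @flush = flush
    @stopping = Concurrent::AtomicBoolean.new(false)
    @thread = Thread.new do
      until stopping?
        # Crude stand-in for Stud.stoppable_sleep: sleep in slices, checking the flag.
        slept = 0.0
        until stopping? || slept >= @interval
          sleep 0.1
          slept += 0.1
        end
        @flush.call unless stopping?
      end
    end
  end

  def stopping?
    @stopping.value
  end

  def close
    @stopping.make_true
    begin
      @thread.wakeup   # interrupt a sleeping flusher; raises if it has already exited
    rescue ThreadError
    end
    @thread.join
    @flush.call        # final flush, mirroring the plugin's close
  end
end

flusher = PeriodicFlusher.new(5) { puts 'flushing buffered events' }
sleep 1
flusher.close
--------------------------
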
data/logstash-output-google_bigquery.gemspec CHANGED
@@ -1,6 +1,6 @@
  Gem::Specification.new do |s|
  s.name = 'logstash-output-google_bigquery'
- s.version = '4.1.0'
+ s.version = '4.1.5'
  s.licenses = ['Apache License (2.0)']
  s.summary = "Writes events to Google BigQuery"
  s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: logstash-output-google_bigquery
  version: !ruby/object:Gem::Version
- version: 4.1.0
+ version: 4.1.5
  platform: java
  authors:
  - Elastic
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2018-09-07 00:00:00.000000000 Z
+ date: 2021-01-15 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  requirement: !ruby/object:Gem::Requirement
@@ -17,8 +17,8 @@ dependencies:
  - !ruby/object:Gem::Version
  version: '0'
  name: logstash-codec-plain
- prerelease: false
  type: :runtime
+ prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
@@ -31,8 +31,8 @@ dependencies:
  - !ruby/object:Gem::Version
  version: '2'
  name: mime-types
- prerelease: false
  type: :runtime
+ prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
  - - "~>"
@@ -48,8 +48,8 @@ dependencies:
  - !ruby/object:Gem::Version
  version: '2.99'
  name: logstash-core-plugin-api
- prerelease: false
  type: :runtime
+ prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
@@ -65,8 +65,8 @@ dependencies:
  - !ruby/object:Gem::Version
  version: '0'
  name: logstash-devutils
- prerelease: false
  type: :development
+ prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
@@ -79,8 +79,8 @@ dependencies:
  - !ruby/object:Gem::Version
  version: 0.3.4
  name: jar-dependencies
- prerelease: false
  type: :development
+ prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
  - - "~>"
@@ -169,8 +169,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
  - !ruby/object:Gem::Version
  version: '0'
  requirements: []
- rubyforge_project:
- rubygems_version: 2.6.13
+ rubygems_version: 3.0.6
  signing_key:
  specification_version: 4
  summary: Writes events to Google BigQuery