logstash-output-google_cloud_storage 3.0.4 → 3.0.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
- SHA1:
3
- metadata.gz: 0cf8af9589beeacbd3a73a889dfe8d0397e25204
4
- data.tar.gz: be2f8a97aa3858984669a37119803c79d213ae70
2
+ SHA256:
3
+ metadata.gz: b52d957da3f5bba4f2d7ce3a947d82c6f29e17050aca2d7b9c5b5e6d75f5c519
4
+ data.tar.gz: f2724f44723ce7c75533ba73b4965f0d59f639960c5ffd7e597dfc9e214539a1
5
5
  SHA512:
6
- metadata.gz: 780e33a16e0df8a81e1554c9403c8bcfcbf65e2fc0a1327f4ef750e6b80fe694bd62265aec915438f8f71c5cea1a67e1609c08885c8a3dda6a3aa78f8866be35
7
- data.tar.gz: af43a2d1f1f1f145ae638ff9df5fe122da4e8d8fa574170e4f5d11c9038f1246519e4e089a4665ab98a628a841be5e7c6e42e2080bc42cde27755e33aecbdb7c
6
+ metadata.gz: 86427f0d948eaf48b1fa34fe6dab17f7ab972a91b41cddaaf58a706ca20351a15cd13028ab37387486c72d4c1de25bee1a76009c08ea7dd0309a998e2855cee3
7
+ data.tar.gz: f8d8e6104679fba6713b53024469e150d01243016134022eaebb948c6247da050cdd2adc19c1c3fd53350f4a13e2de3145b7090a6c3b3d9878c5cf8c5743c837
data/CHANGELOG.md CHANGED
@@ -1,3 +1,6 @@
1
+ ## 3.0.5
2
+ - Docs: Set the default_codec doc attribute.
3
+
1
4
  ## 3.0.4
2
5
  - Fix some documentation issues
3
6
 
data/LICENSE CHANGED
@@ -1,4 +1,4 @@
1
- Copyright (c) 2012–2016 Elasticsearch <http://www.elastic.co>
1
+ Copyright (c) 2012-2018 Elasticsearch <http://www.elastic.co>
2
2
 
3
3
  Licensed under the Apache License, Version 2.0 (the "License");
4
4
  you may not use this file except in compliance with the License.
data/docs/index.asciidoc CHANGED
@@ -1,5 +1,6 @@
1
1
  :plugin: google_cloud_storage
2
2
  :type: output
3
+ :default_codec: plain
3
4
 
4
5
  ///////////////////////////////////////////
5
6
  START - GENERATED VARIABLES, DO NOT EDIT!
@@ -203,4 +204,6 @@ around one hour).
203
204
 
204
205
 
205
206
  [id="plugins-{type}s-{plugin}-common-options"]
206
- include::{include_path}/{type}.asciidoc[]
207
+ include::{include_path}/{type}.asciidoc[]
208
+
209
+ :default_codec!:
@@ -59,6 +59,7 @@ require "zlib"
59
59
  # flush_interval_secs => 2 (optional)
60
60
  # gzip => false (optional)
61
61
  # uploader_interval_secs => 60 (optional)
62
+ # upload_synchronous => false (optional)
62
63
  # }
63
64
  # }
64
65
  # --------------------------
@@ -118,6 +119,17 @@ class LogStash::Outputs::GoogleCloudStorage < LogStash::Outputs::Base
118
119
  # around one hour).
119
120
  config :uploader_interval_secs, :validate => :number, :default => 60
120
121
 
122
+ # When true, files are uploaded by the event processing thread as soon as a file is ready.
123
+ # When false (the default behaviour), files will be uploaded in a dedicated thread.
124
+ #
125
+ # Enabling this option provides greater likelihood that all generated files will be uploaded
126
+ # to GCS, especially in the event of a graceful shutdown of logstash, such as when an
127
+ # input plugin reaches the end of events. This comes at the price of introducing delays
128
+ # in the event processing pipeline as files are uploaded.
129
+ #
130
+ # When this feature is enabled, the uploader_interval_secs option has no effect.
131
+ config :upload_synchronous, :validate => :boolean, :default => false
132
+
121
133
  public
122
134
  def register
123
135
  require "fileutils"
@@ -125,12 +137,19 @@ class LogStash::Outputs::GoogleCloudStorage < LogStash::Outputs::Base
125
137
 
126
138
  @logger.debug("GCS: register plugin")
127
139
 
128
- @upload_queue = Queue.new
129
140
  @last_flush_cycle = Time.now
141
+
142
+ unless upload_synchronous
143
+ initialize_upload_queue()
144
+ end
145
+
130
146
  initialize_temp_directory()
131
147
  initialize_current_log()
132
148
  initialize_google_client()
133
- initialize_uploader()
149
+
150
+ unless upload_synchronous
151
+ @uploader = start_uploader
152
+ end
134
153
 
135
154
  if @gzip
136
155
  @content_type = 'application/gzip'
@@ -162,6 +181,12 @@ class LogStash::Outputs::GoogleCloudStorage < LogStash::Outputs::Base
162
181
  # Close does not guarantee that data is physically written to disk.
163
182
  @temp_file.fsync()
164
183
  @temp_file.close()
184
+
185
+ if upload_synchronous
186
+ upload_object(@temp_file.to_path)
187
+ File.delete(@temp_file.to_path)
188
+ end
189
+
165
190
  initialize_next_log()
166
191
  end
167
192
 
@@ -179,7 +204,15 @@ class LogStash::Outputs::GoogleCloudStorage < LogStash::Outputs::Base
179
204
  @logger.debug("GCS: close method called")
180
205
 
181
206
  @temp_file.fsync()
207
+ filename = @temp_file.to_path
208
+ size = @temp_file.size
182
209
  @temp_file.close()
210
+
211
+ if upload_synchronous && size > 0
212
+ @logger.debug("GCS: uploading last file of #{size.to_s}b")
213
+ upload_object(filename)
214
+ File.delete(filename)
215
+ end
183
216
  end
184
217
 
185
218
  private
@@ -222,42 +255,51 @@ class LogStash::Outputs::GoogleCloudStorage < LogStash::Outputs::Base
222
255
  end
223
256
  end
224
257
 
258
+ def start_uploader
259
+ Thread.new do
260
+ @logger.debug("GCS: starting uploader")
261
+ while true
262
+ upload_from_queue()
263
+ end
264
+ end
265
+ end
225
266
  ##
226
- # Starts thread to upload log files.
267
+ # Uploads log files.
227
268
  #
228
269
  # Uploader is done in a separate thread, not holding the receive method above.
229
- def initialize_uploader
230
- @uploader = Thread.new do
231
- @logger.debug("GCS: starting uploader")
232
- while true
233
- filename = @upload_queue.pop
234
-
235
- # Reenqueue if it is still the current file.
236
- if filename == @temp_file.to_path
237
- if @current_base_path == get_base_path()
238
- @logger.debug("GCS: reenqueue as log file is being currently appended to.",
239
- :filename => filename)
240
- @upload_queue << filename
241
- # If we got here, it means that older files were uploaded, so let's
242
- # wait another minute before checking on this file again.
243
- sleep @uploader_interval_secs
244
- next
245
- else
246
- @logger.debug("GCS: flush and close file to be uploaded.",
247
- :filename => filename)
248
- @temp_file.fsync()
249
- @temp_file.close()
250
- initialize_next_log()
251
- end
252
- end
253
-
254
- upload_object(filename)
255
- @logger.debug("GCS: delete local temporary file ",
270
+ def upload_from_queue
271
+ filename = @upload_queue.pop
272
+
273
+ # Reenqueue if it is still the current file.
274
+ if filename == @temp_file.to_path
275
+ if @current_base_path == get_base_path()
276
+ @logger.debug("GCS: reenqueue as log file is being currently appended to.",
256
277
  :filename => filename)
257
- File.delete(filename)
278
+ @upload_queue << filename
279
+ # If we got here, it means that older files were uploaded, so let's
280
+ # wait another minute before checking on this file again.
258
281
  sleep @uploader_interval_secs
282
+ return
283
+ else
284
+ @logger.debug("GCS: flush and close file to be uploaded.",
285
+ :filename => filename)
286
+ @temp_file.fsync()
287
+ @temp_file.close()
288
+ initialize_next_log()
259
289
  end
260
290
  end
291
+
292
+ if File.stat(filename).size > 0
293
+ upload_object(filename)
294
+ else
295
+ @logger.debug("GCS: file size is zero, skip upload.",
296
+ :filename => filename,
297
+ :filesize => File.stat(filename).size)
298
+ end
299
+ @logger.debug("GCS: delete local temporary file ",
300
+ :filename => filename)
301
+ File.delete(filename)
302
+ sleep @uploader_interval_secs
261
303
  end
262
304
 
263
305
  ##
@@ -319,7 +361,9 @@ class LogStash::Outputs::GoogleCloudStorage < LogStash::Outputs::Base
319
361
  fd = Zlib::GzipWriter.new(fd)
320
362
  end
321
363
  @temp_file = GCSIOWriter.new(fd)
322
- @upload_queue << @temp_file.to_path
364
+ unless upload_synchronous
365
+ @upload_queue << @temp_file.to_path
366
+ end
323
367
  end
324
368
 
325
369
  ##
@@ -374,6 +418,15 @@ class LogStash::Outputs::GoogleCloudStorage < LogStash::Outputs::Base
374
418
  @client.authorization = service_account.authorize
375
419
  end
376
420
 
421
+ # Initialize the queue that harbors files to be uploaded
422
+ def initialize_upload_queue
423
+ @upload_queue = new_upload_queue()
424
+ end
425
+
426
+ def new_upload_queue
427
+ Queue.new
428
+ end
429
+
377
430
  ##
378
431
  # Uploads a local file to the configured bucket.
379
432
  def upload_object(filename)
@@ -1,6 +1,6 @@
1
1
  Gem::Specification.new do |s|
2
2
  s.name = 'logstash-output-google_cloud_storage'
3
- s.version = '3.0.4'
3
+ s.version = '3.0.5'
4
4
  s.licenses = ['Apache License (2.0)']
5
5
  s.summary = "plugin to upload log events to Google Cloud Storage (GCS)"
6
6
  s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
@@ -1,6 +1,7 @@
1
1
  # encoding: utf-8
2
2
  require_relative "../spec_helper"
3
3
  require "google/api_client"
4
+ require "tempfile"
4
5
 
5
6
  describe LogStash::Outputs::GoogleCloudStorage do
6
7
 
@@ -8,6 +9,9 @@ describe LogStash::Outputs::GoogleCloudStorage do
8
9
  let(:service_account) { double("service-account") }
9
10
  let(:key) { "key" }
10
11
 
12
+ subject { described_class.new(config) }
13
+ let(:config) { {"bucket" => "", "key_path" => "", "service_account" => "", "uploader_interval_secs" => 0.1 } }
14
+
11
15
  before(:each) do
12
16
  allow(Google::APIClient).to receive(:new).and_return(client)
13
17
  allow(client).to receive(:discovered_api).with("storage", "v1")
@@ -18,7 +22,37 @@ describe LogStash::Outputs::GoogleCloudStorage do
18
22
  end
19
23
 
20
24
  it "should register without errors" do
21
- plugin = LogStash::Plugin.lookup("output", "google_cloud_storage").new({"bucket" => "", "key_path" => "", "service_account" => ""})
22
- expect { plugin.register }.to_not raise_error
25
+ expect { subject.register }.to_not raise_error
26
+ end
27
+
28
+ describe "file size based decider for uploading" do
29
+ let(:upload_queue) { Queue.new }
30
+ let(:content) { }
31
+ before(:each) do
32
+ allow(subject).to receive(:new_upload_queue).and_return(upload_queue)
33
+ subject.send(:initialize_upload_queue)
34
+ subject.send(:initialize_temp_directory)
35
+ subject.send(:initialize_current_log)
36
+ current_file = upload_queue.pop
37
+ File.write(current_file, content) if content
38
+ upload_queue.push(current_file)
39
+ subject.send(:initialize_next_log)
40
+ end
41
+
42
+ context "when spooled file is empty" do
43
+ let(:content) { nil }
44
+ it "doesn't get uploaded" do
45
+ expect(subject).to_not receive(:upload_object)
46
+ subject.send(:upload_from_queue)
47
+ end
48
+ end
49
+
50
+ context "when spooled file has content" do
51
+ let(:content) { "hello" }
52
+ it "gets uploaded" do
53
+ expect(subject).to receive(:upload_object)
54
+ subject.send(:upload_from_queue)
55
+ end
56
+ end
23
57
  end
24
58
  end
metadata CHANGED
@@ -1,14 +1,14 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: logstash-output-google_cloud_storage
3
3
  version: !ruby/object:Gem::Version
4
- version: 3.0.4
4
+ version: 3.0.5
5
5
  platform: ruby
6
6
  authors:
7
7
  - Elastic
8
8
  autorequire:
9
9
  bindir: bin
10
10
  cert_chain: []
11
- date: 2017-08-16 00:00:00.000000000 Z
11
+ date: 2018-04-06 00:00:00.000000000 Z
12
12
  dependencies:
13
13
  - !ruby/object:Gem::Dependency
14
14
  requirement: !ruby/object:Gem::Requirement
@@ -100,7 +100,9 @@ dependencies:
100
100
  - - ">="
101
101
  - !ruby/object:Gem::Version
102
102
  version: '0'
103
- description: This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program
103
+ description: This gem is a Logstash plugin required to be installed on top of the
104
+ Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This
105
+ gem is not a stand-alone program
104
106
  email: info@elastic.co
105
107
  executables: []
106
108
  extensions: []
@@ -139,7 +141,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
139
141
  version: '0'
140
142
  requirements: []
141
143
  rubyforge_project:
142
- rubygems_version: 2.4.8
144
+ rubygems_version: 2.6.11
143
145
  signing_key:
144
146
  specification_version: 4
145
147
  summary: plugin to upload log events to Google Cloud Storage (GCS)