logstash-output-google_cloud_storage 3.2.0 → 3.2.1

This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
@@ -1,3 +1,8 @@
+ ## 3.2.1
+ - Refactoring work to add locks to file rotation and writing.
+ - Fixes [#2](https://github.com/logstash-plugins/logstash-output-google_cloud_storage/issues/2) - Plugin crashes on file rotation.
+ - Fixes [#19](https://github.com/logstash-plugins/logstash-output-google_cloud_storage/issues/19) - Deleted files remain in use by the system eventually filling up disk space.
+
  ## 3.2.0
  - Change uploads to use a job pool for better performance
  - Fixes [#22](https://github.com/logstash-plugins/logstash-output-google_cloud_storage/issues/22) - Refactor Job Queue Architecture
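
For context on the locking change above, here is a minimal sketch of how the new LogRotate class (added in this diff as lib/logstash/outputs/gcs/log_rotate.rb) is driven. The method names and constructor arguments come from the code further down in this diff; the concrete values are illustrative only.

    # Illustrative values; the plugin derives these from its configuration.
    rotater = LogStash::Outputs::Gcs::LogRotate.new(path_factory,     # a PathFactory
                                                    5 * 1024 * 1024,  # max file size in bytes
                                                    false,            # gzip output?
                                                    2)                # flush interval in seconds

    # The callback receives the path of the file that was rotated out.
    rotater.on_rotate { |path| puts "ready to upload #{path}" }

    rotater.writeln('{"message":"hello"}')  # writes under a write lock, rotating and syncing as needed
    rotater.writeln(nil)                    # nil is not written, but may still trigger rotation or fsync
    rotater.rotate_log!                     # force a rotation, e.g. at shutdown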
@@ -0,0 +1,76 @@
+ # encoding: utf-8
+ require 'logstash/outputs/gcs/temp_log_file'
+ require 'concurrent'
+
+ module LogStash
+   module Outputs
+     module Gcs
+       class LogRotate
+         def initialize(path_factory, max_file_size_bytes, gzip, flush_interval_secs)
+           @path_factory = path_factory
+           @max_file_size_bytes = max_file_size_bytes
+           @gzip = gzip
+           @flush_interval_secs = flush_interval_secs
+
+           @lock = Concurrent::ReentrantReadWriteLock.new
+           @rotate_callback = nil
+
+           rotate_log!
+         end
+
+         # writeln writes a message and carriage-return character to the open
+         # log file, rotating and syncing it if necessary.
+         #
+         # nil messages do not get written, but may cause the log to rotate
+         def writeln(message=nil)
+           @lock.with_write_lock do
+             rotate_log! if should_rotate?
+
+             @temp_file.write(message, "\n") unless message.nil?
+
+             @temp_file.fsync if @temp_file.time_since_sync >= @flush_interval_secs
+           end
+         end
+
+         # rotate_log! closes the current log (if it exists), notifies the
+         # handler, rolls the path over and opens a new log.
+         #
+         # Invariant: the old log will ALWAYS be closed and a new one will
+         # ALWAYS be open at the completion of this function.
+         def rotate_log!
+           @lock.with_write_lock do
+             unless @temp_file.nil?
+               @temp_file.close!
+               @rotate_callback.call(@temp_file.path) unless @rotate_callback.nil?
+             end
+
+             @path_factory.rotate_path!
+
+             path = @path_factory.current_path
+             @temp_file = LogStash::Outputs::Gcs::LogFileFactory.create(path, @gzip)
+           end
+         end
+
+         # on_rotate sets a handler to be called when the log gets rotated.
+         # The handler receives the path to the rotated out log as a string.
+         def on_rotate(&block)
+           @lock.with_write_lock do
+             @rotate_callback = block
+           end
+         end
+
+         private
+
+         def should_rotate?
+           @lock.with_read_lock do
+             path_changed = @path_factory.should_rotate?
+             rotate_on_size = @max_file_size_bytes > 0
+             too_big = @temp_file.size >= @max_file_size_bytes
+
+             path_changed || (rotate_on_size && too_big)
+           end
+         end
+       end
+     end
+   end
+ end
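
A note on the lock used above: writeln acquires the write lock and may then call rotate_log!, which acquires the same write lock again on the same thread. That nested acquisition is why the class uses Concurrent::ReentrantReadWriteLock rather than a plain mutex. A minimal sketch, assuming the concurrent-ruby gem:

    require 'concurrent'

    lock = Concurrent::ReentrantReadWriteLock.new

    # Re-acquiring the write lock on the same thread does not deadlock,
    # which is what lets writeln call rotate_log! while already holding it.
    lock.with_write_lock do
      lock.with_write_lock { puts 'nested acquisition is fine' }
    end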
@@ -30,13 +30,16 @@ module LogStash
 
  # Rotates the path to the next one in sequence. If the path has a part number
  # and the base path (date/hostname) haven't changed the part number is incremented.
+ # Returns the path that was rotated out
  def rotate_path!
+ last_path = current_path
+
  @path_lock.synchronize {
  @part_number = (next_base == current_base) ? @part_number + 1 : 0
  @current = template_variables
  }
 
- current_path
+ last_path
  end
 
  # Checks if the file is ready to rotate because the timestamp changed.
@@ -0,0 +1,110 @@
+ # encoding: utf-8
+ require 'zlib'
+ require 'concurrent'
+ require 'time'
+
+ module LogStash
+   module Outputs
+     module Gcs
+       # LogFileFactory creates a LogFile according to user specification
+       # optionally gzipping it and creating mutexes around modification
+       # points.
+       class LogFileFactory
+         def self.create(path, gzip, synchronize=true)
+           lf = LogStash::Outputs::Gcs::PlainLogFile.new(path)
+           lf = LogStash::Outputs::Gcs::GzipLogFile.new(lf) if gzip
+           lf = LogStash::Outputs::Gcs::SynchronizedLogFile.new(lf) if synchronize
+
+           lf
+         end
+       end
+
+       # PlainLogFile writes events to a plain text file.
+       class PlainLogFile
+         attr_reader :path, :fd
+
+         def initialize(path)
+           @path = path
+           @fd = ::File.new(path, 'a+')
+           @last_sync = Time.now
+         end
+
+         def write(*contents)
+           contents.each { |c| @fd.write(c) }
+         end
+
+         def fsync
+           @fd.fsync
+           @last_sync = Time.now
+         end
+
+         def close!
+           @fd.fsync
+           @fd.close
+         end
+
+         def size
+           ::File.stat(@path).size
+         end
+
+         def time_since_sync
+           Time.now - @last_sync
+         end
+       end
+
+       # GzipLogFile wraps another log file and writes events through it.
+       class GzipLogFile
+         attr_reader :fd
+
+         def initialize(child)
+           @child = child
+           @fd = Zlib::GzipWriter.new(child.fd)
+         end
+
+         def write(*contents)
+           contents.each { |c| @fd.write(c) }
+         end
+
+         def fsync
+           @fd.flush
+           @child.fsync
+         end
+
+         def close!
+           fsync
+           # The Gzip writer closes the underlying IO after
+           # appending the Gzip footer.
+           @fd.close
+         end
+
+         def method_missing(method_name, *args, &block)
+           @child.send(method_name, *args, &block)
+         end
+       end
+
+       # SynchronizedLogFile wraps another log file and uses reentrant locks
+       # around its methods to prevent concurrent modification.
+       class SynchronizedLogFile
+         def initialize(child)
+           @child = child
+           @lock = Concurrent::ReentrantReadWriteLock.new
+         end
+
+         def time_since_sync
+           @lock.with_read_lock { @child.time_since_sync }
+         end
+
+         def path
+           @lock.with_read_lock { @child.path }
+         end
+
+         def method_missing(method_name, *args, &block)
+           # unless otherwise specified, get a write lock
+           @lock.with_write_lock do
+             @child.send(method_name, *args, &block)
+           end
+         end
+       end
+     end
+   end
+ end
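
LogFileFactory.create above composes the three classes as decorators: a PlainLogFile, optionally wrapped in a GzipLogFile, optionally wrapped in a SynchronizedLogFile. A minimal sketch of the resulting call chain, using a hypothetical temp path:

    # Hypothetical path; this is the same entry point LogRotate uses.
    file = LogStash::Outputs::Gcs::LogFileFactory.create('/tmp/example.log.gz', true)

    # The returned object is a SynchronizedLogFile wrapping a GzipLogFile wrapping a PlainLogFile:
    # every call takes the reentrant lock, and writes go through the gzip stream.
    file.write('{"message":"hello"}', "\n")
    file.fsync    # flushes the gzip buffer, then fsyncs the underlying file
    file.close!   # fsyncs, writes the gzip footer, and closes the descriptor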
@@ -21,6 +21,7 @@
  require "logstash/outputs/base"
  require "logstash/outputs/gcs/path_factory"
  require "logstash/outputs/gcs/worker_pool"
+ require "logstash/outputs/gcs/log_rotate"
  require "logstash/namespace"
  require "logstash/json"
  require "stud/interval"
@@ -142,107 +143,62 @@ class LogStash::Outputs::GoogleCloudStorage < LogStash::Outputs::Base
 
  public
  def register
- require "fileutils"
- @logger.debug("GCS: register plugin")
- @last_flush_cycle = Time.now
+ @logger.debug('Registering Google Cloud Storage plugin')
 
  @workers = LogStash::Outputs::Gcs::WorkerPool.new(@max_concurrent_uploads, @upload_synchronous)
- initialize_temp_directory()
+ initialize_temp_directory
  initialize_path_factory
- open_current_file
+ initialize_log_rotater
 
- initialize_google_client()
+ initialize_google_client
 
  start_uploader
 
- if @gzip
- @content_type = 'application/gzip'
- else
- @content_type = 'text/plain'
- end
+ @content_type = @gzip ? 'application/gzip' : 'text/plain'
  end
 
  # Method called for each log event. It writes the event to the current output
  # file, flushing depending on flush interval configuration.
  public
  def receive(event)
- @logger.debug("GCS: receive method called", :event => event)
+ @logger.debug('Received event', :event => event)
 
- if (@output_format == "json")
+ if @output_format == 'json'
  message = LogStash::Json.dump(event.to_hash)
  else
  message = event.to_s
  end
 
- # Time to roll file based on the date pattern? Or is it over the size limit?
- initialize_next_log if ready_to_rotate?
-
- @temp_file.write(message)
- @temp_file.write("\n")
-
- sync_log_file()
-
- @logger.debug("GCS: event appended to log file",
- :filename => File.basename(@temp_file.to_path))
+ @log_rotater.writeln(message)
  end
 
  public
  def close
  @logger.debug('Stopping the plugin, uploading the remaining files.')
-
  Stud.stop!(@registration_thread) unless @registration_thread.nil?
 
- close_and_upload_current
+ # Force rotate the log. If it contains data it will be submitted
+ # to the work pool and will be uploaded before the plugin stops.
+ @log_rotater.rotate_log!
  @workers.stop!
  end
 
  private
 
-
- def ready_to_rotate?
- path_changed = @path_factory.should_rotate?
- too_big = @max_file_size_kbytes > 0 && @temp_file.size >= @max_file_size_kbytes * 1024
-
- path_changed || too_big
- end
-
- ##
- # Flushes temporary log file every flush_interval_secs seconds or so.
- # This is triggered by events, but if there are no events there's no point
- # flushing files anyway.
- #
- # Inspired by lib/logstash/outputs/file.rb (flush(fd), flush_pending_files)
- def sync_log_file
- if flush_interval_secs <= 0
- @temp_file.fsync()
- return
- end
-
- return unless Time.now - @last_flush_cycle >= flush_interval_secs
- @temp_file.fsync()
- @logger.debug("GCS: flushing file",
- :path => @temp_file.to_path,
- :fd => @temp_file)
- @last_flush_cycle = Time.now
- end
-
  ##
  # Creates temporary directory, if it does not exist.
  #
  # A random suffix is appended to the temporary directory
  def initialize_temp_directory
  require "stud/temporary"
+
  if @temp_directory.empty?
- @temp_directory = Stud::Temporary.directory("logstash-gcs")
- @logger.info("GCS: temporary directory generated",
- :directory => @temp_directory)
+ @temp_directory = Stud::Temporary.directory('logstash-gcs')
  end
 
- if !(File.directory? @temp_directory)
- @logger.debug("GCS: directory doesn't exist. Creating it.",
- :directory => @temp_directory)
- FileUtils.mkdir_p(@temp_directory)
- end
+ FileUtils.mkdir_p(@temp_directory) unless File.directory?(@temp_directory)
+
+ @logger.info("Using temporary directory: #{@temp_directory}")
  end
 
  def initialize_path_factory
@@ -257,44 +213,16 @@ class LogStash::Outputs::GoogleCloudStorage < LogStash::Outputs::Base
  end
  end
 
+ # start_uploader periodically sends flush events through the log rotater
  def start_uploader
  Thread.new do
  @registration_thread = Thread.current
  Stud.interval(@uploader_interval_secs) do
- initialize_next_log if ready_to_rotate?
+ @log_rotater.writeln(nil)
  end
  end
  end
 
- ##
- # Opens current log file and updates @temp_file with an instance of IOWriter.
- # This method also adds file to the upload queue.
- def open_current_file
- path = @path_factory.current_path
-
- stat = File.stat(path) rescue nil
- if stat and stat.ftype == "fifo" and RUBY_PLATFORM == "java"
- fd = java.io.FileWriter.new(java.io.File.new(path))
- else
- fd = File.new(path, "a")
- end
- if @gzip
- fd = Zlib::GzipWriter.new(fd)
- end
- @temp_file = GCSIOWriter.new(fd)
- end
-
- ##
- # Generates new log file name based on configuration options and opens log
- # file. If max file size is enabled, part number if incremented in case the
- # the base log file name is the same (e.g. log file was not rolled given the
- # date pattern).
- def initialize_next_log
- close_and_upload_current
- @path_factory.rotate_path!
- open_current_file()
- end
-
  ##
  # Initializes Google Client instantiating client and authorizing access.
  def initialize_google_client
@@ -340,19 +268,6 @@ class LogStash::Outputs::GoogleCloudStorage < LogStash::Outputs::Base
  end
  end
 
- def close_and_upload_current
- return if @temp_file.nil?
-
- filename = @temp_file.to_path
- @temp_file.fsync
- @temp_file.close
- @logger.info("Uploading file: #{filename}")
-
- @workers.post do
- upload_and_delete(filename)
- end
- end
-
  def upload_and_delete(filename)
  file_size = File.stat(filename).size
 
@@ -365,38 +280,16 @@ class LogStash::Outputs::GoogleCloudStorage < LogStash::Outputs::Base
  @logger.debug('Delete local temporary file', :filename => filename)
  File.delete(filename)
  end
- end
 
- ##
- # Wrapper class that abstracts which IO being used (for instance, regular
- # files or GzipWriter.
- #
- # Inspired by lib/logstash/outputs/file.rb.
- class GCSIOWriter
- def initialize(io)
- @io = io
- end
- def write(*args)
- @io.write(*args)
- end
- def fsync
- if @io.class == Zlib::GzipWriter
- @io.flush
- @io.to_io.fsync
- else
- @io.fsync
- end
- end
- def method_missing(method_name, *args, &block)
- if @io.respond_to?(method_name)
- @io.send(method_name, *args, &block)
- else
- if @io.class == Zlib::GzipWriter && @io.to_io.respond_to?(method_name)
- @io.to_io.send(method_name, *args, &block)
- else
- super
+ def initialize_log_rotater
+ max_file_size = @max_file_size_kbytes * 1024
+ @log_rotater = LogStash::Outputs::Gcs::LogRotate.new(@path_factory, max_file_size, @gzip, @flush_interval_secs)
+
+ @log_rotater.on_rotate do |filename|
+ @logger.info("Rotated out file: #{filename}")
+ @workers.post do
+ upload_and_delete(filename)
  end
  end
  end
- attr_accessor :active
  end
@@ -1,6 +1,6 @@
  Gem::Specification.new do |s|
  s.name = 'logstash-output-google_cloud_storage'
- s.version = '3.2.0'
+ s.version = '3.2.1'
  s.licenses = ['Apache-2.0']
  s.summary = "plugin to upload log events to Google Cloud Storage (GCS)"
  s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
@@ -0,0 +1,129 @@
+ # encoding: utf-8
+ require 'logstash/outputs/gcs/log_rotate'
+ require 'logstash/outputs/gcs/path_factory'
+ require 'logstash/outputs/gcs/temp_log_file'
+
+ describe LogStash::Outputs::Gcs::LogRotate do
+   let(:tempdir){ Stud::Temporary.directory }
+   let(:path_factory) do
+     LogStash::Outputs::Gcs::PathFactoryBuilder.build do |builder|
+       builder.set_directory tempdir
+       builder.set_prefix 'prefix'
+       builder.set_include_host true
+       builder.set_date_pattern ''
+       builder.set_include_part true
+       builder.set_include_uuid true
+       builder.set_is_gzipped true
+     end
+   end
+   let(:open_file_1) { double('open-temp-1', :size => 5, :path => 'one', :close! => true, :time_since_sync => 10, :fsync => true)}
+   let(:open_file_2) { double('open-temp-2', :size => 5, :path => 'two', :close! => true, :time_since_sync => 60, :fsync => true)}
+
+   describe '#initialize' do
+     it 'opens the first file' do
+       expect(LogStash::Outputs::Gcs::LogFileFactory).to receive(:create).and_return(open_file_1)
+
+       LogStash::Outputs::Gcs::LogRotate.new(path_factory, 10, false, 30)
+     end
+   end
+
+   describe '#writeln' do
+     subject { LogStash::Outputs::Gcs::LogRotate.new(path_factory, 10, false, 30) }
+
+     it 'does not rotate if size is small and path is the same' do
+       expect(path_factory).to receive(:should_rotate?).and_return(false)
+       # once for init
+       expect(path_factory).to receive(:rotate_path!).once
+
+       subject.writeln('foo')
+     end
+
+     it 'rotates the file if the size is too big' do
+       # once for init, once for writeln
+       expect(path_factory).to receive(:rotate_path!).twice
+
+       subject.writeln('this line is longer than ten characters' * 1000)
+       subject.writeln('flush')
+     end
+
+     it 'rotates the file if the path changed' do
+       expect(path_factory).to receive(:should_rotate?).and_return(true)
+       # once for init, once for writeln
+       expect(path_factory).to receive(:rotate_path!).twice
+
+       subject.writeln('foo')
+     end
+
+     it 'writes the message' do
+       expect(LogStash::Outputs::Gcs::LogFileFactory).to receive(:create).and_return(open_file_1)
+       expect(open_file_1).to receive(:write).with('foo', "\n")
+
+       subject.writeln('foo')
+     end
+
+     it 'does not write nil messages' do
+       expect(LogStash::Outputs::Gcs::LogFileFactory).to receive(:create).and_return(open_file_1)
+       expect(open_file_1).not_to receive(:write)
+
+       subject.writeln(nil)
+     end
+
+     it 'does not fsync if delta less than limit' do
+       expect(LogStash::Outputs::Gcs::LogFileFactory).to receive(:create).and_return(open_file_1)
+       expect(open_file_1).not_to receive(:fsync)
+
+       subject.writeln(nil)
+     end
+
+     it 'fsyncs if delta greater than limit' do
+       expect(LogStash::Outputs::Gcs::LogFileFactory).to receive(:create).and_return(open_file_2)
+       expect(open_file_2).to receive(:fsync)
+
+       subject.writeln(nil)
+     end
+   end
+
+   describe '#rotate_log!' do
+     subject { LogStash::Outputs::Gcs::LogRotate.new(path_factory, 10, false, 30) }
+
+     before :each do
+       allow(LogStash::Outputs::Gcs::LogFileFactory).to receive(:create).and_return(open_file_1, open_file_2)
+     end
+
+     it 'closes the old file' do
+       expect(open_file_1).to receive(:close!)
+
+       subject.rotate_log!
+     end
+
+     it 'calls the callback with the old file name' do
+       value = nil
+       subject.on_rotate { |old_path| value = old_path }
+
+       subject.rotate_log!
+       expect(value).to eq(open_file_1.path)
+     end
+
+     it 'opens a new file based on the new path' do
+       expect(LogStash::Outputs::Gcs::LogFileFactory).to receive(:create).and_return(open_file_1, open_file_2)
+       expect(open_file_2).to receive(:write).with('foo', "\n")
+
+       subject.rotate_log!
+       subject.writeln('foo')
+     end
+   end
+
+   describe '#on_rotate' do
+     subject { LogStash::Outputs::Gcs::LogRotate.new(path_factory, 10, false, 30) }
+
+     it 'replaces an existing callback' do
+       value = :none
+
+       subject.on_rotate { value = :first }
+       subject.on_rotate { value = :second }
+
+       subject.rotate_log!
+       expect(value).to eq(:second)
+     end
+   end
+ end
@@ -125,7 +125,7 @@ describe LogStash::Outputs::Gcs::PathFactory do
  expect(pf.current_path).to include('part000')
  end
 
- it 'returns the current_path' do
+ it 'returns the path being rotated out' do
  pf = LogStash::Outputs::Gcs::PathFactoryBuilder.build do |builder|
  builder.set_directory 'dir'
  builder.set_prefix 'pre'
@@ -135,8 +135,9 @@ describe LogStash::Outputs::Gcs::PathFactory do
  builder.set_include_uuid false
  builder.set_is_gzipped false
  end
+ last = pf.current_path
  after = pf.rotate_path!
- expect(after).to eq(File.join('dir', 'pre_date.part001.log'))
+ expect(after).to eq(last)
  end
  end
 
@@ -151,7 +152,7 @@ describe LogStash::Outputs::Gcs::PathFactory do
  builder.set_include_uuid false
  builder.set_is_gzipped false
  end
- sleep 0.1
+ sleep 1.0
  expect(pf.should_rotate?).to eq(false)
  end
 
@@ -165,7 +166,7 @@ describe LogStash::Outputs::Gcs::PathFactory do
  builder.set_include_uuid false
  builder.set_is_gzipped false
  end
- sleep 0.1
+ sleep 1.0
  expect(pf.should_rotate?).to eq(true)
  end
  end
@@ -0,0 +1,119 @@
+ # encoding: utf-8
+ require 'logstash/outputs/gcs/temp_log_file'
+ require 'stud/temporary'
+ require 'zlib'
+
+ shared_examples 'a log file' do
+   describe '#initialize' do
+     it 'opens a file' do
+       expect{subject.fd}.to_not raise_error
+       expect(subject.fd).to_not be_nil
+     end
+
+     it 'sets the path' do
+       expect{subject.path}.to_not raise_error
+       expect(subject.path).to_not be_nil
+     end
+
+     it 'sets last sync' do
+       expect{subject.time_since_sync}.to_not raise_error
+     end
+   end
+
+   describe '#write' do
+     it 'writes the content' do
+       expect(subject.fd).to receive(:write).with('foo')
+       expect(subject.fd).to receive(:write).with("\n")
+
+       subject.write('foo', "\n")
+     end
+
+     it 'fails if the file is closed' do
+       subject.close!
+
+       expect{ subject.write('foo') }.to raise_error(IOError)
+     end
+   end
+
+   describe '#fsync' do
+     it 'fails if the file is closed' do
+       subject.close!
+
+       expect{ subject.fsync }.to raise_error(IOError)
+     end
+   end
+
+   describe '#close!' do
+     it 'fails if the file is closed' do
+       subject.close!
+
+       expect{ subject.close! }.to raise_error(IOError)
+     end
+   end
+
+   describe '#size' do
+     it 'gets the size of the file on disk' do
+       subject.write('hello, world!')
+       subject.fsync
+
+       expect(subject.size).to eq(File.stat(subject.path).size)
+     end
+
+     it 'does not fail if the file is closed' do
+       subject.close!
+
+       expect{ subject.size }.to_not raise_error
+     end
+   end
+
+   describe '#time_since_sync' do
+     it 'returns a delta' do
+       expect(Time).to receive(:now).and_return(30, 40, 50)
+
+       subject.fsync
+
+       expect(subject.time_since_sync).to eq(10)
+     end
+   end
+ end
+
+ describe LogStash::Outputs::Gcs::PlainLogFile do
+   let(:tempdir) { Stud::Temporary.directory }
+   let(:path) { ::File.join(tempdir, 'logfile.log') }
+   subject { LogStash::Outputs::Gcs::LogFileFactory.create(path, false, false) }
+
+   it_behaves_like 'a log file'
+
+   it 'creates a valid plain text file' do
+     subject.write('Hello, world!')
+     subject.close!
+     data = File.read(path)
+
+     expect(data).to eq('Hello, world!')
+   end
+ end
+
+ describe LogStash::Outputs::Gcs::GzipLogFile do
+   let(:tempdir) { Stud::Temporary.directory }
+   let(:path) { ::File.join(tempdir, 'logfile.log') }
+   subject { LogStash::Outputs::Gcs::LogFileFactory.create(path, true, false) }
+
+   it_behaves_like 'a log file'
+
+   it 'creates a valid gzip' do
+     subject.write('Hello, world!')
+     subject.close!
+
+     Zlib::GzipReader.open(path) do |gz|
+       expect(gz.read).to eq('Hello, world!')
+     end
+   end
+ end
+
+ describe LogStash::Outputs::Gcs::SynchronizedLogFile do
+   let(:tempdir) { Stud::Temporary.directory }
+   let(:path) { ::File.join(tempdir, 'logfile.log') }
+   subject { LogStash::Outputs::Gcs::LogFileFactory.create(path, false, true) }
+
+   it_behaves_like 'a log file'
+ end
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: logstash-output-google_cloud_storage
  version: !ruby/object:Gem::Version
- version: 3.2.0
+ version: 3.2.1
  platform: ruby
  authors:
  - Elastic
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2018-06-04 00:00:00.000000000 Z
+ date: 2018-06-05 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  requirement: !ruby/object:Gem::Requirement
@@ -129,11 +129,15 @@ files:
  - NOTICE.TXT
  - README.md
  - docs/index.asciidoc
+ - lib/logstash/outputs/gcs/log_rotate.rb
  - lib/logstash/outputs/gcs/path_factory.rb
+ - lib/logstash/outputs/gcs/temp_log_file.rb
  - lib/logstash/outputs/gcs/worker_pool.rb
  - lib/logstash/outputs/google_cloud_storage.rb
  - logstash-output-google_cloud_storage.gemspec
+ - spec/outputs/gcs/log_rotate_spec.rb
  - spec/outputs/gcs/path_factory_spec.rb
+ - spec/outputs/gcs/temp_log_file_spec.rb
  - spec/outputs/gcs/worker_pool_spec.rb
  - spec/outputs/google_cloud_storage_spec.rb
  - spec/spec_helper.rb
@@ -164,7 +168,9 @@ signing_key:
  specification_version: 4
  summary: plugin to upload log events to Google Cloud Storage (GCS)
  test_files:
+ - spec/outputs/gcs/log_rotate_spec.rb
  - spec/outputs/gcs/path_factory_spec.rb
+ - spec/outputs/gcs/temp_log_file_spec.rb
  - spec/outputs/gcs/worker_pool_spec.rb
  - spec/outputs/google_cloud_storage_spec.rb
  - spec/spec_helper.rb
checksums.yaml DELETED
@@ -1,7 +0,0 @@
- ---
- SHA256:
- metadata.gz: ef57485cd166eb205939da40bb4db73428955388af8e5c13d313852eb8c297c7
- data.tar.gz: 8d3e0f581f611a9c7148ecc9f871b54e8c7ebf356ea3da39a3fa0a187d710184
- SHA512:
- metadata.gz: 022b0a599c17c5a9dc062093662556f2cfd15c7d2345527b92a4b9bda4fb2fa3756e837bbe471a60bf2034ce2a6b059f76b2ec01c1d27ba6b54575f628e46bc8
- data.tar.gz: b603dea8edb673e1a1e0c72a5431d5d915ade37d1f6164fb3b40882bdecb936a6959ec16e6724d239ca26e87b651c6a8602cfbd439d82d824bb14781611a8bd0