logstash-output-google_cloud_storage 3.0.5 → 3.1.0

Sign up to get free protection for your applications and to gain access to all of the features.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: b52d957da3f5bba4f2d7ce3a947d82c6f29e17050aca2d7b9c5b5e6d75f5c519
4
- data.tar.gz: f2724f44723ce7c75533ba73b4965f0d59f639960c5ffd7e597dfc9e214539a1
3
+ metadata.gz: a9181dddb40215cd78a1b07d3b327df10e7dd678b3eab3d34f511a84f11a92a6
4
+ data.tar.gz: 4696f52738d5e20466b8639565a7eeee646b19e27e0421fc39c7cc283359a07b
5
5
  SHA512:
6
- metadata.gz: 86427f0d948eaf48b1fa34fe6dab17f7ab972a91b41cddaaf58a706ca20351a15cd13028ab37387486c72d4c1de25bee1a76009c08ea7dd0309a998e2855cee3
7
- data.tar.gz: f8d8e6104679fba6713b53024469e150d01243016134022eaebb948c6247da050cdd2adc19c1c3fd53350f4a13e2de3145b7090a6c3b3d9878c5cf8c5743c837
6
+ metadata.gz: 93b09c005fc9cd4afdca9678b14949f35ad5d07ec92cb5f1ffb2ab20d8228f6e46e72e197846d1704cc2575f9bbf8c603e03ebf355b0423f6de6e42f6cb698dc
7
+ data.tar.gz: 05e5bd961335f99458a4c46d5b271bcfd1279c6349817a483c9c65c1c62b0f1efeac48221b5c3c0e8869e0603caedcb5ece6b1d6da21a21641341fd306769549
data/CHANGELOG.md CHANGED
@@ -1,3 +1,7 @@
1
+ ## 3.1.0
2
+ - Add support for disabling hostname in the log file names
3
+ - Add support for adding a UUID to the log file names
4
+
1
5
  ## 3.0.5
2
6
  - Docs: Set the default_codec doc attribute.
3
7
 
data/docs/index.asciidoc CHANGED
@@ -57,6 +57,8 @@ output {
57
57
  flush_interval_secs => 2 (optional)
58
58
  gzip => false (optional)
59
59
  uploader_interval_secs => 60 (optional)
60
+ include_uuid => true (optional)
61
+ include_hostname => true (optional)
60
62
  }
61
63
  }
62
64
  --------------------------
@@ -81,6 +83,8 @@ This plugin supports the following configuration options plus the <<plugins-{typ
81
83
  | <<plugins-{type}s-{plugin}-date_pattern>> |<<string,string>>|No
82
84
  | <<plugins-{type}s-{plugin}-flush_interval_secs>> |<<number,number>>|No
83
85
  | <<plugins-{type}s-{plugin}-gzip>> |<<boolean,boolean>>|No
86
+ | <<plugins-{type}s-{plugin}-include_hostname>> |<<boolean,boolean>>|No
87
+ | <<plugins-{type}s-{plugin}-include_uuid>> |<<boolean,boolean>>|No
84
88
  | <<plugins-{type}s-{plugin}-key_password>> |<<string,string>>|No
85
89
  | <<plugins-{type}s-{plugin}-key_path>> |<<string,string>>|Yes
86
90
  | <<plugins-{type}s-{plugin}-log_file_prefix>> |<<string,string>>|No
@@ -131,6 +135,32 @@ on every message.
131
135
 
132
136
  Gzip output stream when writing events to log files.
133
137
 
138
+ [id="plugins-{type}s-{plugin}-include_hostname"]
139
+ ===== `include_hostname`
140
+
141
+ added[3.1.0]
142
+
143
+ * Value type is <<boolean,boolean>>
144
+ * Default value is `true`
145
+
146
+ Should the hostname be included in the file name?
147
+ You may want to turn this off for privacy reasons or if you are running multiple
148
+ instances of Logstash and need to match the files you create with a simple glob
149
+ such as if you wanted to import files to BigQuery.
150
+
151
+
152
+ [id="plugins-{type}s-{plugin}-include_uuid"]
153
+ ===== `include_uuid`
154
+
155
+ added[3.1.0]
156
+
157
+ * Value type is <<boolean,boolean>>
158
+ * Default value is `false`
159
+
160
+ Adds a UUID to the end of a file name.
161
+ You may want to enable this feature so files don't clobber one another if you're
162
+ running multiple instances of Logstash or if you expect frequent node restarts.
163
+
134
164
  [id="plugins-{type}s-{plugin}-key_password"]
135
165
  ===== `key_password`
136
166
 
@@ -0,0 +1,116 @@
1
# encoding: utf-8
require 'thread'
require 'socket'        # Socket.gethostname is used in template_variables
require 'securerandom'  # SecureRandom.uuid is used in template_variables

module LogStash
  module Outputs
    module Gcs
      # PathFactory creates paths for rotating files.
      class PathFactory
        # @param directory [String] parent directory for generated files
        # @param prefix [String] leading component of every file name
        # @param include_host [Boolean] insert the hostname after the prefix
        # @param date_pattern [String] strftime pattern for the date component
        # @param include_part [Boolean] append a ".partNNN" rotation counter
        # @param include_uuid [Boolean] append a random UUID to the name
        # @param is_gzipped [Boolean] append a ".gz" extension
        def initialize(directory, prefix, include_host, date_pattern, include_part, include_uuid, is_gzipped)
          @path_lock = Mutex.new

          # The "base" is the part of the name that is invariant for a given
          # host/date pair; rotation decisions compare successive bases.
          pattern = '%{prefix}'
          pattern += '_%{host}' if include_host
          pattern += '_%{date}'
          @base_pattern = pattern

          pattern += '.part%{partf}' if include_part
          pattern += '.%{uuid}' if include_uuid
          pattern += '.log'
          pattern += '.gz' if is_gzipped
          @pattern = pattern

          @prefix = prefix
          @directory = directory
          @date_pattern = date_pattern

          @part_number = starting_part
          @current = template_variables
        end

        # Rotates the path to the next one in sequence. If the path has a part number
        # and the base path (date/hostname) haven't changed the part number is incremented.
        #
        # @return [String] the new current path
        def rotate_path!
          @path_lock.synchronize {
            @part_number = (next_base == current_base) ? @part_number + 1 : 0
            @current = template_variables
          }

          current_path
        end

        # Checks if the file is ready to rotate because the timestamp changed.
        def should_rotate?
          @path_lock.synchronize {
            next_base != current_base
          }
        end

        # Returns the full path to the current file including parent directory.
        #
        # @param vars [Hash, nil] optional template variables (mainly for tests);
        #   defaults to the variables captured at the last rotation.
        def current_path(vars=nil)
          @path_lock.synchronize {
            filename = @pattern % (vars || @current)
            ::File.join(@directory, filename)
          }
        end

        private

        # search through the directory for a file with the same base, and if it exists,
        # set our part to be the max + 1 so we don't clobber existing files.
        def starting_part
          return 0 unless ::File.directory? @directory

          base_path = ::File.join(@directory, next_base)

          part_numbers = Dir.glob(base_path + '.part*').map do |item|
            match = /^.*\.part(?<part_num>\d+).*$/.match(item)
            next if match.nil?
            match[:part_num].to_i
          end

          # `next` inside map yields nil entries for non-matching names; drop
          # them so `max` cannot raise an ArgumentError comparing nil to Integer.
          part_numbers = part_numbers.compact
          part_numbers.any? ? part_numbers.max + 1 : 0
        end

        # Variables interpolated into the file-name patterns. A fresh UUID is
        # generated on every call.
        def template_variables
          {
            prefix: @prefix,
            host: Socket.gethostname,
            date: Time.now.strftime(@date_pattern),
            partf: '%03d' % @part_number,
            uuid: SecureRandom.uuid
          }
        end

        # Base name rendered with fresh template variables (i.e. "now").
        def next_base
          @base_pattern % template_variables
        end

        # Base name rendered with the variables captured at the last rotation.
        def current_base
          @base_pattern % @current
        end
      end

      # PathFactoryBuilder makes the long PathFactory constructor chain more readable.
      class PathFactoryBuilder
        def self.build
          builder = new
          yield builder
          builder.build_path_factory
        end

        # Defines a set_<name> writer for each constructor argument.
        def self.builder_setter(*names)
          names.each do |name|
            define_method("set_#{name}") {|arg| instance_variable_set("@#{name}", arg)}
          end
        end

        builder_setter :directory, :prefix, :include_host, :date_pattern, :include_part, :include_uuid, :is_gzipped

        def build_path_factory
          PathFactory.new(@directory, @prefix, @include_host, @date_pattern, @include_part, @include_uuid, @is_gzipped)
        end
      end
    end
  end
end
@@ -19,6 +19,7 @@
19
19
  # limitations under the License.
20
20
  # -----
21
21
  require "logstash/outputs/base"
22
+ require "logstash/outputs/gcs/path_factory"
22
23
  require "logstash/namespace"
23
24
  require "logstash/json"
24
25
  require "zlib"
@@ -119,6 +120,12 @@ class LogStash::Outputs::GoogleCloudStorage < LogStash::Outputs::Base
119
120
  # around one hour).
120
121
  config :uploader_interval_secs, :validate => :number, :default => 60
121
122
 
123
+ # Should the hostname be included in the file name?
124
+ config :include_hostname, :validate => :boolean, :default => true
125
+
126
+ # Should a UUID be included in the file name?
127
+ config :include_uuid, :validate => :boolean, :default => false
128
+
122
129
  # When true, files are uploaded by the event processing thread as soon as a file is ready.
123
130
  # When false, (the default behaviour), files will be uploaded in a dedicated thread.
124
131
  #
@@ -144,7 +151,9 @@ class LogStash::Outputs::GoogleCloudStorage < LogStash::Outputs::Base
144
151
  end
145
152
 
146
153
  initialize_temp_directory()
147
- initialize_current_log()
154
+ initialize_path_factory
155
+ open_current_file
156
+
148
157
  initialize_google_client()
149
158
 
150
159
  unless upload_synchronous
@@ -170,10 +179,8 @@ class LogStash::Outputs::GoogleCloudStorage < LogStash::Outputs::Base
170
179
  message = event.to_s
171
180
  end
172
181
 
173
- new_base_path = get_base_path()
174
-
175
182
  # Time to roll file based on the date pattern? Or is it over the size limit?
176
- if (@current_base_path != new_base_path || (@max_file_size_kbytes > 0 && @temp_file.size >= @max_file_size_kbytes * 1024))
183
+ if (@path_factory.should_rotate? || (@max_file_size_kbytes > 0 && @temp_file.size >= @max_file_size_kbytes * 1024))
177
184
  @logger.debug("GCS: log file will be closed and uploaded",
178
185
  :filename => File.basename(@temp_file.to_path),
179
186
  :size => @temp_file.size.to_s,
@@ -255,6 +262,18 @@ class LogStash::Outputs::GoogleCloudStorage < LogStash::Outputs::Base
255
262
  end
256
263
  end
257
264
 
265
  # Builds the PathFactory used to name temporary log files, wiring in the
  # plugin's configuration options. A ".partNNN" counter is only included
  # when size-based rotation is enabled (max_file_size_kbytes > 0).
  def initialize_path_factory
    @path_factory = LogStash::Outputs::Gcs::PathFactoryBuilder.build do |builder|
      builder.set_directory @temp_directory
      builder.set_prefix @log_file_prefix
      builder.set_include_host @include_hostname
      builder.set_date_pattern @date_pattern
      builder.set_include_part(@max_file_size_kbytes > 0)
      builder.set_include_uuid @include_uuid
      builder.set_is_gzipped @gzip
    end
  end
276
+
258
277
  def start_uploader
259
278
  Thread.new do
260
279
  @logger.debug("GCS: starting uploader")
@@ -272,7 +291,7 @@ class LogStash::Outputs::GoogleCloudStorage < LogStash::Outputs::Base
272
291
 
273
292
  # Reenqueue if it is still the current file.
274
293
  if filename == @temp_file.to_path
275
- if @current_base_path == get_base_path()
294
+ if !@path_factory.should_rotate?
276
295
  @logger.debug("GCS: reenqueue as log file is being currently appended to.",
277
296
  :filename => filename)
278
297
  @upload_queue << filename
@@ -299,58 +318,14 @@ class LogStash::Outputs::GoogleCloudStorage < LogStash::Outputs::Base
299
318
  @logger.debug("GCS: delete local temporary file ",
300
319
  :filename => filename)
301
320
  File.delete(filename)
302
- sleep @uploader_interval_secs
303
- end
304
-
305
- ##
306
- # Returns base path to log file that is invariant regardless of whether
307
- # max file or gzip options.
308
- def get_base_path
309
- return @temp_directory + File::SEPARATOR + @log_file_prefix + "_" +
310
- Socket.gethostname() + "_" + Time.now.strftime(@date_pattern)
311
- end
312
-
313
- ##
314
- # Returns log file suffix, which will vary depending on whether gzip is
315
- # enabled.
316
- def get_suffix
317
- return @gzip ? ".log.gz" : ".log"
318
- end
319
-
320
- ##
321
- # Returns full path to the log file based on global variables (like
322
- # current_base_path) and configuration options (max file size and gzip
323
- # enabled).
324
- def get_full_path
325
- if @max_file_size_kbytes > 0
326
- return @current_base_path + ".part" + ("%03d" % @size_counter) + get_suffix()
327
- else
328
- return @current_base_path + get_suffix()
329
- end
330
- end
331
-
332
- ##
333
- # Returns latest part number for a base path. This method checks all existing
334
- # log files in order to find the highest part number, so this file can be used
335
- # for appending log events.
336
- #
337
- # Only applicable if max file size is enabled.
338
- def get_latest_part_number(base_path)
339
- part_numbers = Dir.glob(base_path + ".part*" + get_suffix()).map do |item|
340
- match = /^.*\.part(?<part_num>\d+)#{get_suffix()}$/.match(item)
341
- next if match.nil?
342
- match[:part_num].to_i
343
- end
344
-
345
- return part_numbers.max if part_numbers.any?
346
- 0
347
321
  end
348
322
 
349
323
  ##
350
324
  # Opens current log file and updates @temp_file with an instance of IOWriter.
351
325
  # This method also adds file to the upload queue.
352
326
  def open_current_file()
353
- path = get_full_path()
327
+ path = @path_factory.current_path
328
+
354
329
  stat = File.stat(path) rescue nil
355
330
  if stat and stat.ftype == "fifo" and RUBY_PLATFORM == "java"
356
331
  fd = java.io.FileWriter.new(java.io.File.new(path))
@@ -366,37 +341,13 @@ class LogStash::Outputs::GoogleCloudStorage < LogStash::Outputs::Base
366
341
  end
367
342
  end
368
343
 
369
- ##
370
- # Opens log file on plugin initialization, trying to resume from an existing
371
- # file. If max file size is enabled, find the highest part number and resume
372
- # from it.
373
- def initialize_current_log
374
- @current_base_path = get_base_path
375
- if @max_file_size_kbytes > 0
376
- @size_counter = get_latest_part_number(@current_base_path)
377
- @logger.debug("GCS: resuming from latest part.",
378
- :part => @size_counter)
379
- end
380
- open_current_file()
381
- end
382
-
383
344
  ##
384
345
  # Generates new log file name based on configuration options and opens log
385
346
  # file. If max file size is enabled, part number if incremented in case the
386
347
  # the base log file name is the same (e.g. log file was not rolled given the
387
348
  # date pattern).
388
349
  def initialize_next_log
389
- new_base_path = get_base_path
390
- if @max_file_size_kbytes > 0
391
- @size_counter = @current_base_path == new_base_path ? @size_counter + 1 : 0
392
- @logger.debug("GCS: opening next log file.",
393
- :filename => @current_base_path,
394
- :part => @size_counter)
395
- else
396
- @logger.debug("GCS: opening next log file.",
397
- :filename => @current_base_path)
398
- end
399
- @current_base_path = new_base_path
350
+ @path_factory.rotate_path!
400
351
  open_current_file()
401
352
  end
402
353
 
@@ -1,7 +1,7 @@
1
1
  Gem::Specification.new do |s|
2
2
  s.name = 'logstash-output-google_cloud_storage'
3
- s.version = '3.0.5'
4
- s.licenses = ['Apache License (2.0)']
3
+ s.version = '3.1.0'
4
+ s.licenses = ['Apache-2.0']
5
5
  s.summary = "plugin to upload log events to Google Cloud Storage (GCS)"
6
6
  s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
7
7
  s.authors = ["Elastic"]
@@ -0,0 +1,188 @@
1
# encoding: utf-8
require 'logstash/outputs/gcs/path_factory'

# Unit tests for PathFactory / PathFactoryBuilder. Paths are rendered via
# current_path(vars) with fixed template variables so the assertions do not
# depend on the real hostname, clock or random UUIDs.
describe LogStash::Outputs::Gcs::PathFactory do
  describe '#initialize' do
    it 'includes optional fields if requested' do
      pf = LogStash::Outputs::Gcs::PathFactoryBuilder.build do |builder|
        builder.set_directory 'path/to/directory'
        builder.set_prefix 'prefix'
        builder.set_include_host true
        builder.set_date_pattern ''
        builder.set_include_part true
        builder.set_include_uuid true
        builder.set_is_gzipped true
      end

      vars = {
        prefix: 'prefix',
        host: 'hostname',
        date: '2018-01-01',
        uuid: '00000000-0000-0000-0000-000000000000',
        partf: '333'
      }

      expected = 'prefix_hostname_2018-01-01.part333.00000000-0000-0000-0000-000000000000.log.gz'
      expected = File.join('path/to/directory', expected)

      actual = pf.current_path(vars)

      expect(actual).to eq(expected)
    end

    it 'excludes optional fields if not requested' do
      pf = LogStash::Outputs::Gcs::PathFactoryBuilder.build do |builder|
        builder.set_directory 'path/to/directory'
        builder.set_prefix 'prefix'
        builder.set_include_host false
        builder.set_date_pattern ''
        builder.set_include_part false
        builder.set_include_uuid false
        builder.set_is_gzipped false
      end

      vars = {
        prefix: 'prefix',
        host: 'hostname',
        date: '2018-01-01',
        uuid: '00000000-0000-0000-0000-000000000000',
        partf: '333'
      }

      expected = 'prefix_2018-01-01.log'
      expected = File.join('path/to/directory', expected)

      actual = pf.current_path(vars)

      expect(actual).to eq(expected)
    end

    it 'loads a path immediately' do
      pf = LogStash::Outputs::Gcs::PathFactoryBuilder.build do |builder|
        builder.set_directory ''
        builder.set_prefix ''
        builder.set_include_host false
        builder.set_date_pattern ''
        builder.set_include_part false
        builder.set_include_uuid false
        builder.set_is_gzipped false
      end

      expect(pf.current_path).to_not eq(nil)
    end

    it 'recovers the starting part number' do
      # Highest existing part is 091, so the factory should resume at 092
      # rather than clobber the files already on disk.
      contents = ['pre_date.part009.log.gz', 'pre_date.part091.log.gz', 'pre_date.part000.log.gz']

      allow(::File).to receive(:directory?).with('dir').and_return(true)
      allow(Dir).to receive(:glob).and_return(contents)

      pf = LogStash::Outputs::Gcs::PathFactoryBuilder.build do |builder|
        builder.set_directory 'dir'
        builder.set_prefix 'pre'
        builder.set_include_host false
        builder.set_date_pattern 'date'
        builder.set_include_part true
        builder.set_include_uuid false
        builder.set_is_gzipped false
      end

      expect(pf.current_path).to include('part092')
    end
  end

  describe 'rotate_path!' do
    it 'increments the part number if the base has not changed' do
      pf = LogStash::Outputs::Gcs::PathFactoryBuilder.build do |builder|
        builder.set_directory 'dir'
        builder.set_prefix 'pre'
        builder.set_include_host false
        builder.set_date_pattern 'date'
        builder.set_include_part true
        builder.set_include_uuid false
        builder.set_is_gzipped false
      end

      expect(pf.current_path).to eq(File.join('dir', 'pre_date.part000.log'))

      pf.rotate_path!
      expect(pf.current_path).to eq(File.join('dir', 'pre_date.part001.log'))
    end

    it 'resets the part number if the base has changed' do
      # '%N' (nanoseconds) makes the date component differ between calls,
      # which forces the base to change and the part counter to reset.
      pf = LogStash::Outputs::Gcs::PathFactoryBuilder.build do |builder|
        builder.set_directory 'dir'
        builder.set_prefix 'pre'
        builder.set_include_host false
        builder.set_date_pattern '%N'
        builder.set_include_part true
        builder.set_include_uuid false
        builder.set_is_gzipped false
      end
      expect(pf.current_path).to include('part000')

      pf.rotate_path!
      expect(pf.current_path).to include('part000')
    end

    it 'returns the current_path' do
      pf = LogStash::Outputs::Gcs::PathFactoryBuilder.build do |builder|
        builder.set_directory 'dir'
        builder.set_prefix 'pre'
        builder.set_include_host false
        builder.set_date_pattern 'date'
        builder.set_include_part true
        builder.set_include_uuid false
        builder.set_is_gzipped false
      end
      after = pf.rotate_path!
      expect(after).to eq(File.join('dir', 'pre_date.part001.log'))
    end
  end

  describe 'should_rotate?' do
    it 'returns false when the times in the bases are the same' do
      pf = LogStash::Outputs::Gcs::PathFactoryBuilder.build do |builder|
        builder.set_directory ''
        builder.set_prefix ''
        builder.set_include_host false
        builder.set_date_pattern ''
        builder.set_include_part false
        builder.set_include_uuid false
        builder.set_is_gzipped false
      end
      # An empty date pattern never changes, so time passing must not rotate.
      sleep 0.1
      expect(pf.should_rotate?).to eq(false)
    end

    it 'returns true when the times in the bases are different' do
      pf = LogStash::Outputs::Gcs::PathFactoryBuilder.build do |builder|
        builder.set_directory ''
        builder.set_prefix ''
        builder.set_include_host false
        builder.set_date_pattern '%N'
        builder.set_include_part false
        builder.set_include_uuid false
        builder.set_is_gzipped false
      end
      # Nanosecond pattern guarantees a different base after the sleep.
      sleep 0.1
      expect(pf.should_rotate?).to eq(true)
    end
  end

  describe 'current_path' do
    it 'joins the directory and filename' do
      pf = LogStash::Outputs::Gcs::PathFactoryBuilder.build do |builder|
        builder.set_directory 'dir'
        builder.set_prefix 'pre'
        builder.set_include_host false
        builder.set_date_pattern 'date'
        builder.set_include_part false
        builder.set_include_uuid false
        builder.set_is_gzipped false
      end

      expect(pf.current_path).to eq(File.join('dir', 'pre_date.log'))
    end
  end
end
@@ -32,7 +32,8 @@ describe LogStash::Outputs::GoogleCloudStorage do
32
32
  allow(subject).to receive(:new_upload_queue).and_return(upload_queue)
33
33
  subject.send(:initialize_upload_queue)
34
34
  subject.send(:initialize_temp_directory)
35
- subject.send(:initialize_current_log)
35
+ subject.send(:initialize_path_factory)
36
+ subject.send(:open_current_file)
36
37
  current_file = upload_queue.pop
37
38
  File.write(current_file, content) if content
38
39
  upload_queue.push(current_file)
metadata CHANGED
@@ -1,14 +1,14 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: logstash-output-google_cloud_storage
3
3
  version: !ruby/object:Gem::Version
4
- version: 3.0.5
4
+ version: 3.1.0
5
5
  platform: ruby
6
6
  authors:
7
7
  - Elastic
8
8
  autorequire:
9
9
  bindir: bin
10
10
  cert_chain: []
11
- date: 2018-04-06 00:00:00.000000000 Z
11
+ date: 2018-05-16 00:00:00.000000000 Z
12
12
  dependencies:
13
13
  - !ruby/object:Gem::Dependency
14
14
  requirement: !ruby/object:Gem::Requirement
@@ -115,13 +115,15 @@ files:
115
115
  - NOTICE.TXT
116
116
  - README.md
117
117
  - docs/index.asciidoc
118
+ - lib/logstash/outputs/gcs/path_factory.rb
118
119
  - lib/logstash/outputs/google_cloud_storage.rb
119
120
  - logstash-output-google_cloud_storage.gemspec
121
+ - spec/outputs/gcs/path_factory_spec.rb
120
122
  - spec/outputs/google_cloud_storage_spec.rb
121
123
  - spec/spec_helper.rb
122
124
  homepage: http://www.elastic.co/guide/en/logstash/current/index.html
123
125
  licenses:
124
- - Apache License (2.0)
126
+ - Apache-2.0
125
127
  metadata:
126
128
  logstash_plugin: 'true'
127
129
  logstash_group: output
@@ -141,10 +143,11 @@ required_rubygems_version: !ruby/object:Gem::Requirement
141
143
  version: '0'
142
144
  requirements: []
143
145
  rubyforge_project:
144
- rubygems_version: 2.6.11
146
+ rubygems_version: 2.6.13
145
147
  signing_key:
146
148
  specification_version: 4
147
149
  summary: plugin to upload log events to Google Cloud Storage (GCS)
148
150
  test_files:
151
+ - spec/outputs/gcs/path_factory_spec.rb
149
152
  - spec/outputs/google_cloud_storage_spec.rb
150
153
  - spec/spec_helper.rb