logstash-input-file 2.2.0 → 2.2.1

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA1:
- metadata.gz: 6412fcb09af0a0e32879a180c77e409c868caa43
- data.tar.gz: 1d2fe6993ca73c122b2002a6c72c5d0f68a1fb9a
+ metadata.gz: 581d2e0d0226001ca38ff0bc76e043886605b1f2
+ data.tar.gz: a2f6a6e76baa734b7b8632f9175f4202ed77d8c5
  SHA512:
- metadata.gz: 2418a21eb0d3ea85185158c69ccbb971b332da29476fcdd7508249f3c28edda61c6a98e6886484a4a9bda6fd3e2b9ac0057eb6a8a84de17b2af075f4671cbae6
- data.tar.gz: 189d29b3ccfbabdc6ad77d510cccb416dc0c738564676101a76beec9f37129c922f79ba613fb280d7486e70b4450693f9f08872a50372f93e467b09640cd9e73
+ metadata.gz: f42dcbc3e885b978c01be71259dd65ac87b7b5ec78eba41cc1f642a6902569a598c8d122fecf7c219ab96057f7adf08a4317fb2c8c8dd21e1cb36332068abbe3
+ data.tar.gz: 14ff48936b13aee32f0e8ed73c20355603833fc5d7705f5b6601067a559766a591063479b0e13d0f01712d32c55d4106823316bd94d5e780087cacec000b7ed4
CHANGELOG.md CHANGED
@@ -1,3 +1,6 @@
+ ## 2.2.1
+ - Fix spec failures on CI Linux builds (not seen on local OSX and Linux)
+
  ## 2.2.0
  - Use ruby-filewatch 0.8.0, a major rework of filewatch. See [Pull Request 74](https://github.com/jordansissel/ruby-filewatch/pull/74)
  - Add a max_open_files config option, defaulting to 4095. The input can process far more files than this, but holds at most this many open at any time; files are closed based on the close_older setting, freeing slots for others.
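The max_open_files and close_older options work together: close_older releases idle file handles so that globs larger than max_open_files can still be processed. A minimal sketch of that interplay, mirroring how the specs further below construct the input (the require path follows the plugin's standard layout; paths and values are illustrative):

```ruby
# Illustrative only: builds the file input the same way the specs below do.
require "logstash/inputs/file"

conf = {
  "path"           => "/var/log/example/*.log",    # illustrative glob
  "sincedb_path"   => "/var/tmp/example.sincedb",  # illustrative path
  "stat_interval"  => 0.1,
  "max_open_files" => 1,    # hold at most one file handle open at a time
  "close_older"    => 0.5,  # close a file idle for 0.5s, freeing its slot
  "start_position" => "beginning"
}

input = LogStash::Inputs::File.new(conf)
input.register
# input.run(queue) would then tail every matching file, cycling handles so
# that no more than max_open_files stay open at once.
```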
data/README.md CHANGED
@@ -1,7 +1,9 @@
  # Logstash Plugin
+ Travis Build
+ [![Travis Build Status](https://travis-ci.org/logstash-plugins/logstash-input-file.svg)](https://travis-ci.org/logstash-plugins/logstash-input-file)

- [![Build
- Status](http://build-eu-00.elastic.co/view/LS%20Plugins/view/LS%20Inputs/job/logstash-plugin-input-file-unit/badge/icon)](http://build-eu-00.elastic.co/view/LS%20Plugins/view/LS%20Inputs/job/logstash-plugin-input-file-unit/)
+ Jenkins Build
+ [![Build Status](http://build-eu-00.elastic.co/view/LS%20Plugins/view/LS%20Inputs/job/logstash-plugin-input-file-unit/badge/icon)](http://build-eu-00.elastic.co/view/LS%20Plugins/view/LS%20Inputs/job/logstash-plugin-input-file-unit/)

  This is a plugin for [Logstash](https://github.com/elastic/logstash).

@@ -86,4 +88,4 @@ Programming is not a required skill. Whatever you've seen about open source and

  It is more important to the community that you are able to contribute.

- For more information about contributing, see the [CONTRIBUTING](https://github.com/elastic/logstash/blob/master/CONTRIBUTING.md) file.
+ For more information about contributing, see the [CONTRIBUTING](https://github.com/elastic/logstash/blob/master/CONTRIBUTING.md) file.
logstash-input-file.gemspec CHANGED
@@ -1,7 +1,7 @@
  Gem::Specification.new do |s|

  s.name = 'logstash-input-file'
- s.version = '2.2.0'
+ s.version = '2.2.1'
  s.licenses = ['Apache License (2.0)']
  s.summary = "Stream events from files."
  s.description = "This gem is a logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/plugin install gemname. This gem is not a stand-alone program"
@@ -28,7 +28,8 @@ Gem::Specification.new do |s|
  s.add_runtime_dependency 'logstash-codec-multiline', ['~> 2.0.7']

  s.add_development_dependency 'stud', ['~> 0.0.19']
- s.add_development_dependency 'logstash-devutils'
+ s.add_development_dependency 'logstash-devutils', ['~> 0.0.18']
  s.add_development_dependency 'logstash-codec-json'
+ s.add_development_dependency 'rspec-sequencing'
  end

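The new rspec-sequencing development dependency drives the spec rework below: the Thread.new / sleep / pause_until pattern is replaced by scheduled, named steps, and subject.run now blocks the example thread until subject.stop is called from a later step. A condensed sketch of that pattern, assuming the spec context seen below (subject, events, and tmpfile_path come from the surrounding example group):

```ruby
require "rspec_sequencing"

# Steps are scheduled up front and fire on a timer thread, so the example
# thread is free to block inside subject.run.
actions = RSpec::Sequencing
  .run_after(0.1, "append a line") do
    File.open(tmpfile_path, "a") { |fd| fd.puts("hello") }
  end
  .then_after(0.2, "assert the event arrived, then stop the input") do
    expect(events.size).to eq(1)
    subject.stop              # unblocks subject.run below
  end

subject.run(events)           # blocks until subject.stop is called
actions.value                 # wait for the scheduled steps to finish
```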
@@ -173,21 +173,23 @@ describe LogStash::Inputs::File do
  "codec" => mlcodec,
  "delimiter" => FILE_DELIMITER)
  subject.register
- Thread.new { subject.run(events) }
  end

  it "reads the appended data only" do
- sleep 0.1
- File.open(tmpfile_path, "a") do |fd|
- fd.puts("hello")
- fd.puts("world")
- fd.fsync
- end
- # wait for one event, the last line is buffered
- expect(pause_until{ events.size == 1 }).to be_truthy
- subject.stop
+ RSpec::Sequencing
+ .run_after(0.1, "assert zero events then append two lines") do
+ expect(events.size).to eq(0)
+ File.open(tmpfile_path, "a") { |fd| fd.puts("hello"); fd.puts("world") }
+ end
+ .then_after(0.1, "only one event is created, the last line is buffered") do
+ expect(events.size).to eq(1)
+ end
+ .then_after(0.1, "quit") do
+ subject.stop
+ end
+ subject.run(events)
  # stop flushes the second event
- expect(pause_until{ events.size == 2 }).to be_truthy
+ expect(events.size).to eq(2)

  event1 = events[0]
  expect(event1).not_to be_nil
@@ -215,39 +217,44 @@ describe LogStash::Inputs::File do
  "sincedb_path" => sincedb_path,
  "stat_interval" => 0.02,
  "codec" => codec,
- "close_older" => 1,
+ "close_older" => 0.5,
  "delimiter" => FILE_DELIMITER)

  subject.register
- Thread.new { subject.run(events) }
  end

  it "having timed_out, the identity is evicted" do
- sleep 0.1
- File.open("#{tmpdir_path}/a.log", "a") do |fd|
- fd.puts(line)
- fd.fsync
- end
- expect(pause_until{ subject.codec.identity_count == 1 }).to be_truthy
- expect(codec).to receive_call_and_args(:accept, [true])
- # wait for expiry to kick in and close files.
- expect(pause_until{ subject.codec.identity_count.zero? }).to be_truthy
- expect(codec).to receive_call_and_args(:auto_flush, [true])
- subject.stop
+ RSpec::Sequencing
+ .run("create file") do
+ File.open("#{tmpdir_path}/a.log", "wb") { |file| file.puts(line) }
+ end
+ .then_after(0.3, "identity is mapped") do
+ expect(codec.trace_for(:accept)).to eq([true])
+ expect(subject.codec.identity_count).to eq(1)
+ end
+ .then_after(0.3, "test for auto_flush") do
+ expect(codec.trace_for(:auto_flush)).to eq([true])
+ expect(subject.codec.identity_count).to eq(0)
+ end
+ .then_after(0.1, "quit") do
+ subject.stop
+ end
+ subject.run(events)
  end
  end

  context "when ignore_older config is specified" do
  let(:line) { "line1.1-of-a" }
+ let(:tmp_dir_file) { "#{tmpdir_path}/a.log" }

  subject { described_class.new(conf) }

  before do
- File.open("#{tmpdir_path}/a.log", "a") do |fd|
+ File.open(tmp_dir_file, "a") do |fd|
  fd.puts(line)
  fd.fsync
  end
- sleep 1.1 # wait for file to age
+ FileInput.make_file_older(tmp_dir_file, 2)
  conf.update(
  "type" => "blah",
  "path" => "#{tmpdir_path}/*.log",
@@ -262,7 +269,7 @@ describe LogStash::Inputs::File do
  end

  it "the file is not read" do
- sleep 0.5
+ sleep 0.1
  subject.stop
  expect(codec).to receive_call_and_args(:accept, false)
  expect(codec).to receive_call_and_args(:auto_flush, false)
@@ -272,22 +279,6 @@ describe LogStash::Inputs::File do

  context "when wildcard path and a multiline codec is specified" do
  subject { described_class.new(conf) }
- let(:writer_proc) do
- -> do
- File.open("#{tmpdir_path}/a.log", "a") do |fd|
- fd.puts("line1.1-of-a")
- fd.puts(" line1.2-of-a")
- fd.puts(" line1.3-of-a")
- fd.fsync
- end
- File.open("#{tmpdir_path}/z.log", "a") do |fd|
- fd.puts("line1.1-of-z")
- fd.puts(" line1.2-of-z")
- fd.puts(" line1.3-of-z")
- fd.fsync
- end
- end
- end

  before do
  mlconf.update("pattern" => "^\s", "what" => "previous")
@@ -300,58 +291,76 @@ describe LogStash::Inputs::File do
  "delimiter" => FILE_DELIMITER)

  subject.register
- Thread.new { subject.run(events) }
- sleep 0.1
- writer_proc.call
  end

  it "collects separate multiple line events from each file" do
- # wait for both paths to be mapped as identities
- expect(pause_until{ subject.codec.identity_count == 2 }).to be_truthy
- subject.stop
- # stop flushes both events
- expect(pause_until{ events.size == 2 }).to be_truthy
-
- e1, e2 = events
- e1_message = e1["message"]
- e2_message = e2["message"]
-
- # can't assume File A will be read first
- if e1_message.start_with?('line1.1-of-z')
- expect(e1["path"]).to match(/z.log/)
- expect(e2["path"]).to match(/a.log/)
- expect(e1_message).to eq("line1.1-of-z#{FILE_DELIMITER} line1.2-of-z#{FILE_DELIMITER} line1.3-of-z")
- expect(e2_message).to eq("line1.1-of-a#{FILE_DELIMITER} line1.2-of-a#{FILE_DELIMITER} line1.3-of-a")
- else
- expect(e1["path"]).to match(/a.log/)
- expect(e2["path"]).to match(/z.log/)
- expect(e1_message).to eq("line1.1-of-a#{FILE_DELIMITER} line1.2-of-a#{FILE_DELIMITER} line1.3-of-a")
- expect(e2_message).to eq("line1.1-of-z#{FILE_DELIMITER} line1.2-of-z#{FILE_DELIMITER} line1.3-of-z")
- end
- end
-
- context "if auto_flush is enabled on the multiline codec" do
- let(:writer_proc) do
- -> do
- File.open("#{tmpdir_path}/a.log", "a") do |fd|
+ actions = RSpec::Sequencing
+ .run_after(0.1, "create files") do
+ File.open("#{tmpdir_path}/A.log", "wb") do |fd|
  fd.puts("line1.1-of-a")
  fd.puts(" line1.2-of-a")
  fd.puts(" line1.3-of-a")
  end
+ File.open("#{tmpdir_path}/z.log", "wb") do |fd|
+ fd.puts("line1.1-of-z")
+ fd.puts(" line1.2-of-z")
+ fd.puts(" line1.3-of-z")
+ end
  end
- end
- let(:mlconf) { { "auto_flush_interval" => 1 } }
+ .then_after(0.2, "assert both files are mapped as identities and stop") do
+ expect(subject.codec.identity_count).to eq(2)
+ end
+ .then_after(0.1, "stop") do
+ subject.stop
+ end
+ .then_after(0.2 , "stop flushes both events") do
+ expect(events.size).to eq(2)
+ e1, e2 = events
+ e1_message = e1["message"]
+ e2_message = e2["message"]
+
+ # can't assume File A will be read first
+ if e1_message.start_with?('line1.1-of-z')
+ expect(e1["path"]).to match(/z.log/)
+ expect(e2["path"]).to match(/A.log/)
+ expect(e1_message).to eq("line1.1-of-z#{FILE_DELIMITER} line1.2-of-z#{FILE_DELIMITER} line1.3-of-z")
+ expect(e2_message).to eq("line1.1-of-a#{FILE_DELIMITER} line1.2-of-a#{FILE_DELIMITER} line1.3-of-a")
+ else
+ expect(e1["path"]).to match(/A.log/)
+ expect(e2["path"]).to match(/z.log/)
+ expect(e1_message).to eq("line1.1-of-a#{FILE_DELIMITER} line1.2-of-a#{FILE_DELIMITER} line1.3-of-a")
+ expect(e2_message).to eq("line1.1-of-z#{FILE_DELIMITER} line1.2-of-z#{FILE_DELIMITER} line1.3-of-z")
+ end
+ end
+ subject.run(events)
+ # wait for actions to complete
+ actions.value
+ end
+
+ context "if auto_flush is enabled on the multiline codec" do
+ let(:mlconf) { { "auto_flush_interval" => 0.5 } }

  it "an event is generated via auto_flush" do
- # wait for auto_flush
- # without it lines are buffered and pause_until would time out i.e false
- expect(pause_until{ events.size == 1 }).to be_truthy
- subject.stop
-
- e1 = events.first
- e1_message = e1["message"]
- expect(e1["path"]).to match(/a.log/)
- expect(e1_message).to eq("line1.1-of-a#{FILE_DELIMITER} line1.2-of-a#{FILE_DELIMITER} line1.3-of-a")
+ actions = RSpec::Sequencing
+ .run_after(0.1, "create files") do
+ File.open("#{tmpdir_path}/A.log", "wb") do |fd|
+ fd.puts("line1.1-of-a")
+ fd.puts(" line1.2-of-a")
+ fd.puts(" line1.3-of-a")
+ end
+ end
+ .then_after(0.75, "wait for auto_flush") do
+ e1 = events.first
+ e1_message = e1["message"]
+ expect(e1["path"]).to match(/a.log/)
+ expect(e1_message).to eq("line1.1-of-a#{FILE_DELIMITER} line1.2-of-a#{FILE_DELIMITER} line1.3-of-a")
+ end
+ .then("stop") do
+ subject.stop
+ end
+ subject.run(events)
+ # wait for actions to complete
+ actions.value
  end
  end
  end
@@ -424,25 +433,30 @@ describe LogStash::Inputs::File do
  "start_position" => "beginning",
  "delimiter" => FILE_DELIMITER)
  subject.register
- Thread.new { subject.run(events) }
- sleep 0.1
  end
  it "collects line events from only one file" do
- # wait for one path to be mapped as identity
- expect(pause_until{ subject.codec.identity_count == 1 }).to be_truthy
- subject.stop
- # stop flushes last event
- expect(pause_until{ events.size == 2 }).to be_truthy
-
- e1, e2 = events
- if Dir.glob("#{tmpdir_path}/*.log").first =~ %r{a\.log}
- #linux and OSX have different retrieval order
- expect(e1["message"]).to eq("line1-of-a")
- expect(e2["message"]).to eq("line2-of-a")
- else
- expect(e1["message"]).to eq("line1-of-z")
- expect(e2["message"]).to eq("line2-of-z")
- end
+ actions = RSpec::Sequencing
+ .run_after(0.2, "assert one identity is mapped") do
+ expect(subject.codec.identity_count).to eq(1)
+ end
+ .then_after(0.1, "stop") do
+ subject.stop
+ end
+ .then_after(0.1, "stop flushes last event") do
+ expect(events.size).to eq(2)
+ e1, e2 = events
+ if Dir.glob("#{tmpdir_path}/*.log").first =~ %r{a\.log}
+ #linux and OSX have different retrieval order
+ expect(e1["message"]).to eq("line1-of-a")
+ expect(e2["message"]).to eq("line2-of-a")
+ else
+ expect(e1["message"]).to eq("line1-of-z")
+ expect(e2["message"]).to eq("line2-of-z")
+ end
+ end
+ subject.run(events)
+ # wait for actions future value
+ actions.value
  end
  end

@@ -454,28 +468,37 @@ describe LogStash::Inputs::File do
  "sincedb_path" => sincedb_path,
  "stat_interval" => 0.1,
  "max_open_files" => 1,
- "close_older" => 1,
+ "close_older" => 0.5,
  "start_position" => "beginning",
  "delimiter" => FILE_DELIMITER)
  subject.register
- Thread.new { subject.run(events) }
- sleep 0.1
  end

  it "collects line events from both files" do
- # close flushes last event of each identity
- expect(pause_until{ events.size == 4 }).to be_truthy
- subject.stop
- if Dir.glob("#{tmpdir_path}/*.log").first =~ %r{a\.log}
- #linux and OSX have different retrieval order
- e1, e2, e3, e4 = events
- else
- e3, e4, e1, e2 = events
- end
- expect(e1["message"]).to eq("line1-of-a")
- expect(e2["message"]).to eq("line2-of-a")
- expect(e3["message"]).to eq("line1-of-z")
- expect(e4["message"]).to eq("line2-of-z")
+ actions = RSpec::Sequencing
+ .run_after(0.2, "assert both identities are mapped and the first two events are built") do
+ expect(subject.codec.identity_count).to eq(2)
+ expect(events.size).to eq(2)
+ end
+ .then_after(0.8, "wait for close to flush last event of each identity") do
+ expect(events.size).to eq(4)
+ if Dir.glob("#{tmpdir_path}/*.log").first =~ %r{a\.log}
+ #linux and OSX have different retrieval order
+ e1, e2, e3, e4 = events
+ else
+ e3, e4, e1, e2 = events
+ end
+ expect(e1["message"]).to eq("line1-of-a")
+ expect(e2["message"]).to eq("line2-of-a")
+ expect(e3["message"]).to eq("line1-of-z")
+ expect(e4["message"]).to eq("line2-of-z")
+ end
+ .then_after(0.1, "stop") do
+ subject.stop
+ end
+ subject.run(events)
+ # wait for actions future value
+ actions.value
  end
  end

@@ -1,8 +1,14 @@
  # encoding: utf-8

  require "logstash/devutils/rspec/spec_helper"
+ require "rspec_sequencing"

  module FileInput
+ def self.make_file_older(path, seconds)
+ time = Time.now.to_f - seconds
+ File.utime(time, time, path)
+ end
+
  class TracerBase
  def initialize() @tracer = []; end

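The new make_file_older helper replaces the `sleep 1.1 # wait for file to age` step in the ignore_older spec above: rather than waiting for real time to pass, it pushes the file's atime and mtime into the past with File.utime. A standalone sketch of the same idea (the path is illustrative):

```ruby
require "fileutils"

path = "/tmp/example-a.log"       # illustrative path
FileUtils.touch(path)
aged = Time.now.to_f - 2          # two seconds in the past
File.utime(aged, aged, path)      # back-date atime and mtime
puts File.mtime(path)             # => a timestamp roughly two seconds old
```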
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: logstash-input-file
  version: !ruby/object:Gem::Version
- version: 2.2.0
+ version: 2.2.1
  platform: ruby
  authors:
  - Elastic
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2016-01-29 00:00:00.000000000 Z
+ date: 2016-02-04 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: logstash-core
@@ -108,6 +108,20 @@ dependencies:
  type: :development
  - !ruby/object:Gem::Dependency
  name: logstash-devutils
+ version_requirements: !ruby/object:Gem::Requirement
+ requirements:
+ - - ~>
+ - !ruby/object:Gem::Version
+ version: 0.0.18
+ requirement: !ruby/object:Gem::Requirement
+ requirements:
+ - - ~>
+ - !ruby/object:Gem::Version
+ version: 0.0.18
+ prerelease: false
+ type: :development
+ - !ruby/object:Gem::Dependency
+ name: logstash-codec-json
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
  - - '>='
@@ -121,7 +135,7 @@ dependencies:
  prerelease: false
  type: :development
  - !ruby/object:Gem::Dependency
- name: logstash-codec-json
+ name: rspec-sequencing
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
  - - '>='