logstash-input-qingstor 0.1.3 → 0.1.5

@@ -0,0 +1,53 @@
+ require 'logstash/inputs/qingstor'
+ require 'zlib'
+
+ module LogStash
+   module Inputs
+     class Qingstor
+       # define class LogReader to read log files
+       class LogReader
+         attr_accessor :filepath
+
+         def initialize(filepath)
+           @filepath = filepath
+         end
+
+         def read_file(&block)
+           if gzip?(@filepath)
+             read_gzip_file(block)
+           else
+             read_plain_file(block)
+           end
+         end
+
+         def read_gzip_file(block)
+           Zlib::GzipReader.open(@filepath) do |decoder|
+             decoder.each_line { |line| block.call(line) }
+           end
+         rescue Zlib::Error, Zlib::GzipFile::Error => e
+           @logger.error('Gzip codec: Cannot uncompress the file',
+                         :filepath => @filepath)
+           raise e
+         end
+
+         def read_plain_file(block)
+           ::File.open(@filepath, 'rb') do |file|
+             file.each(&block)
+           end
+         end
+
+         def valid_format?(filepath)
+           logger?(filepath) || gzip?(filepath)
+         end
+
+         def logger?(filepath)
+           filepath.end_with?('.log', '.txt')
+         end
+
+         def gzip?(filepath)
+           filepath.end_with?('.gz')
+         end
+       end # class LogReader
+     end # class QingStor
+   end # module Inputs
+ end # module LogStash
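
This hunk adds the LogReader helper (required elsewhere in the diff as 'logstash/inputs/qingstor/log_reader'), which picks a plain or gzip reader based on the file extension. A minimal usage sketch, assuming a Logstash environment; the path is hypothetical:

    require 'logstash/inputs/qingstor/log_reader'

    # Hypothetical local path; LogReader treats .log/.txt as plain text and .gz as gzip.
    reader = LogStash::Inputs::Qingstor::LogReader.new('/tmp/example.log')
    reader.read_file { |line| puts line } if reader.valid_format?(reader.filepath)
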
@@ -1,30 +1,35 @@
- # encoding: utf-8
- require "qingstor/sdk"
- require "fileutils"
+ require 'logstash/inputs/qingstor'
+ require 'qingstor/sdk'
 
- module LogStash
-   module Inputs
-     class Qingstor
-       class QingstorValidator
+ # Validator for checking the availability of QingStor settings
+ module QingstorValidator
+   def bucket_valid?(bucket)
+     res = bucket.head
+     case res[:status_code]
+     when 401
+       raise LogStash::ConfigurationError,
+             'Incorrect key id or access key.'
+     when 404
+       raise LogStash::ConfigurationError,
+             'Incorrect bucket/region name.'
+     end
+     true
+   end
 
-         def self.bucket_valid?(bucket)
-           res = bucket.head
-           case res[:status_code]
-           when 401
-             raise LogStash::ConfigurationError, "Incorrect key id or access key."
-           when 404
-             raise LogStash::ConfigurationError, "Incorrect bucket/region name."
-           end
-           true
-         end
+   def prefix_valid?(prefix)
+     if prefix.start_with?('/') || prefix.length >= 1024
+       raise LogStash::ConfigurationError, 'Prefix must not start with '\
+         + "'/' with length less than 1024"
+     end
+     true
+   end
 
-         def self.prefix_valid?(prefix)
-           if prefix.start_with?("/") || prefix.length >= 1024
-             raise LogStash::ConfigurationError, "Prefix must not start with '/' with length less than 1024 "
-           end
-           true
-         end
-       end
-     end
-   end
- end
+   def create_if_not_exist(bucket)
+     return if bucket.head[:status_code] == 200
+     res = bucket.put
+     if res[:status_code] != 201
+       @logger.error('ERROR : cannot create the bucket ', res[:message])
+       raise LogStash::ConfigurationError, 'cannot create the bucket'
+     end
+   end # def create_if_not_exist
+ end # module QingstorValidator
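
This hunk rewrites QingstorValidator from a nested class with class methods into a top-level mixin, so callers now include the module and call the checks as instance methods. A sketch under that assumption; the bucket construction follows the usual qingstor-sdk pattern (Config.init / Service#bucket), and the class name, bucket name, and zone are illustrative only:

    require 'qingstor/sdk'
    require 'logstash/inputs/qingstor/qingstor_validator'

    # Illustrative consumer of the mixin.
    class SettingsCheck
      include QingstorValidator
    end

    config = QingStor::SDK::Config.init(ENV['access_key_id'], ENV['secret_access_key'])
    bucket = QingStor::SDK::Service.new(config).bucket('my-logs', 'pek3a')

    check = SettingsCheck.new
    check.bucket_valid?(bucket)   # raises LogStash::ConfigurationError on 401/404
    check.prefix_valid?('logs/')  # raises if the prefix starts with '/' or is too long
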
@@ -0,0 +1,36 @@
+ require 'logstash/inputs/qingstor'
+ require 'fileutils'
+
+ # module used to record the download history
+ module LogStash
+   module Inputs
+     class Qingstor
+       # define the class SinceDB
+       class SinceDB
+         def initialize(file)
+           @sincedb_path = file
+         end
+
+         def newer?(date)
+           Time.at(date) > read
+         end
+
+         def read
+           if ::File.exist?(@sincedb_path)
+             content = ::File.read(@sincedb_path).chomp.strip
+             content.empty? ? Time.new(0) : Time.parse(content)
+           else
+             Time.new(0)
+           end
+         end
+
+         def write(since = nil)
+           since = Time.now if since.nil?
+           dir = ::File.dirname(@sincedb_path)
+           FileUtils.mkdir_p(dir) unless ::File.directory?(dir)
+           ::File.open(@sincedb_path, 'w') { |file| file.write(since.to_s) }
+         end
+       end # class SinceDB
+     end # class QingStor
+   end # module Inputs
+ end # module LogStash
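
SinceDB persists the timestamp of the last processed object so that a restart only picks up newer keys. A short sketch (the path and the epoch value are illustrative, not from the diff):

    require 'logstash/inputs/qingstor/sincedb'

    db = LogStash::Inputs::Qingstor::SinceDB.new('/tmp/qingstor/.sincedb')
    db.write(Time.now)         # record the current position on disk
    db.newer?(1_500_000_000)   # compares Time.at(epoch) against the stored time
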
@@ -0,0 +1,56 @@
+ require 'logstash/inputs/qingstor'
+ require 'qingstor/sdk'
+ require 'concurrent'
+
+ module LogStash
+   module Inputs
+     class Qingstor
+       # define class Uploader to process upload jobs
+       class Uploader
+         require 'logstash/inputs/qingstor/qingstor_validator'
+         include QingstorValidator
+
+         TIME_BEFORE_RETRYING_SECONDS = 1
+         DEFAULT_THREADPOOL = Concurrent::ThreadPoolExecutor.new(
+           :min_thread => 1,
+           :max_thread => 8,
+           :max_queue => 2,
+           :fallback_policy => :caller_runs
+         )
+
+         attr_reader :bucket, :prefix, :logger
+
+         def initialize(bucket, prefix, logger)
+           @bucket = bucket
+           @prefix = prefix
+           @logger = logger
+           @workers_pool = DEFAULT_THREADPOOL
+         end
+
+         def upload_async(filename, filepath)
+           @workers_pool.post do
+             upload(filename, filepath)
+           end
+         end
+
+         def upload(filename, filepath)
+           create_if_not_exist(@bucket)
+           file_md5 = Digest::MD5.file(filepath).to_s
+           key = if @prefix.end_with?('/') || @prefix.empty?
+                   @prefix + filename
+                 else
+                   @prefix + '/' + filename
+                 end
+           @logger.debug('uploading backup file', :file => filename)
+           @bucket.put_object(key, 'content_md5' => file_md5,
+                                   'body' => ::File.open(filepath))
+         end
+
+         def stop
+           @workers_pool.shutdown
+           @workers_pool.wait_for_termination(nil)
+         end
+       end # class Uploader
+     end # class QingStor
+   end # module Inputs
+ end # module LogStash
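
Uploader backs up processed files to another QingStor bucket through a small thread pool. A sketch of the intended call sequence (bucket and logger construction are elided; the file names are illustrative):

    uploader = LogStash::Inputs::Qingstor::Uploader.new(bucket, 'backup/', logger)
    uploader.upload_async('log3.log', '/tmp/log3.log')   # queued on the worker pool
    uploader.stop                                        # drain the pool on shutdown
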
@@ -1,27 +1,29 @@
  Gem::Specification.new do |s|
    s.name = 'logstash-input-qingstor'
-   s.version = '0.1.3'
+   s.version = '0.1.5'
    s.licenses = ['Apache License (2.0)']
    s.summary = 'logstash input plugin for QingStor'
-   s.description = 'Use this plugin to fetch file from Qingstor as the input of logstash'
-   s.homepage = 'https://github.com/Tacinight/logstash-input-qingstor'
+   s.description = 'Fetch file from Qingstor as the input of logstash'
+   s.homepage = 'https://github.com/yunify/logstash-input-qingstor'
    s.authors = ['Evan Zhao']
    s.email = 'tacingiht@gmail.com'
    s.require_paths = ['lib']

    # Files
-   s.files = Dir['lib/**/*','spec/**/*','vendor/**/*','*.gemspec','*.md','CONTRIBUTORS','Gemfile','LICENSE','NOTICE.TXT']
-   # Tests
+   s.files = Dir['lib/**/*', 'spec/**/*', 'vendor/**/*', '*.gemspec', '*.md',
+                 'CONTRIBUTORS', 'Gemfile', 'LICENSE', 'NOTICE.TXT']
+
+   # Tests
    s.test_files = s.files.grep(%r{^(test|spec|features)/})

    # Special flag to let us know this is actually a logstash plugin
-   s.metadata = { "logstash_plugin" => "true", "logstash_group" => "input" }
+   s.metadata = { 'logstash_plugin' => 'true', 'logstash_group' => 'input' }

    # Gem dependencies
-   s.add_runtime_dependency "logstash-core-plugin-api", ">=1.6", "<=2.99"
+   s.add_runtime_dependency 'logstash-core-plugin-api', '>=1.6', '<=2.99'
    s.add_runtime_dependency 'logstash-codec-plain'
    s.add_runtime_dependency 'stud', '>= 0.0.22'
-   s.add_runtime_dependency "qingstor-sdk", ">=1.9.2"
+   s.add_runtime_dependency 'qingstor-sdk', '>=1.9.2'

    s.add_development_dependency 'logstash-devutils'
- end
+ end
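
The gemspec hunk bumps the version to 0.1.5, moves the homepage to the yunify organisation, and normalises quoting. To pin the new release in a Logstash Gemfile (illustrative entry, not part of the diff):

    gem 'logstash-input-qingstor', '0.1.5'
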
@@ -0,0 +1,48 @@
+ require 'logstash/devutils/rspec/spec_helper'
+ require 'logstash/inputs/qingstor/log_reader'
+ require 'tmpdir'
+
+ describe LogStash::Inputs::Qingstor::LogReader do
+   subject(:log_reader) { described_class.new('/a/example/path') }
+
+   let(:content) { 'may the code be with you!' }
+   let(:plain_file_path) { File.join(Dir.tmpdir, 'plain.log') }
+   let(:gzip_file_path) { File.join(Dir.tmpdir, 'gzip.gz') }
+   let(:invalid_file_path) { File.join(Dir.tmpdir, 'invalid.ivd') }
+
+   context 'when read plain file' do
+     before do
+       File.open(plain_file_path, 'w') do |f|
+         f.write(content)
+       end
+     end
+
+     it do
+       log_reader.filepath = plain_file_path
+       log_reader.read_file do |f|
+         expect(f).to eq(content)
+       end
+     end
+   end
+
+   context 'when read gzip file' do
+     before do
+       Zlib::GzipWriter.open(gzip_file_path) do |gz|
+         gz.write(content)
+       end
+     end
+
+     it do
+       log_reader.filepath = gzip_file_path
+       log_reader.read_file do |f|
+         expect(f).to eq(content)
+       end
+     end
+   end
+
+   context 'when valid format' do
+     it { expect(log_reader.valid_format?(plain_file_path)).to be_truthy }
+     it { expect(log_reader.valid_format?(gzip_file_path)).to be_truthy }
+     it { expect(log_reader.valid_format?(invalid_file_path)).to be_falsey }
+   end
+ end
@@ -0,0 +1,27 @@
+ require 'logstash/devutils/rspec/spec_helper'
+ require 'logstash/inputs/qingstor/sincedb'
+ require 'tmpdir'
+
+ describe LogStash::Inputs::Qingstor::SinceDB do
+   subject(:sincedb) { described_class.new(sincedb_path) }
+
+   let(:sincedb_path) { File.join(Dir.tmpdir, 'log_tmp_dir/log_tmp.log') }
+
+   context 'when run at first time' do
+     before do
+       File.delete(sincedb_path) if File.exist?(sincedb_path)
+     end
+
+     it { expect(sincedb.read).to eq(Time.new(0)) }
+     it { expect(sincedb.newer?(Time.now)).to be_truthy }
+   end
+
+   context 'when write the record' do
+     it do
+       time = Time.now
+       sincedb.write(time)
+       content = File.read(sincedb_path).chomp.strip
+       expect(content).to eq(time.to_s)
+     end
+   end
+ end
@@ -0,0 +1,42 @@
+ require 'logstash/devutils/rspec/spec_helper'
+ require 'logstash/inputs/qingstor/uploader'
+ require 'qingstor/sdk'
+ require 'stud/temporary'
+ require_relative '../qs_access_helper'
+
+ describe LogStash::Inputs::Qingstor::Uploader do
+   let(:bucket) { qs_init_bucket }
+   let(:new_bucket) { qs_init_bucket }
+   let(:key) { 'foobar' }
+   let(:file) { Stud::Temporary.file }
+   let(:filepath) { file.path }
+   let(:logger) { spy(:logger) }
+
+   context 'when upload file' do
+     let(:prefix) { '' }
+
+     after do
+       delete_remote_file(prefix + key)
+     end
+
+     it do
+       uploader = described_class.new(bucket, prefix, logger)
+       uploader.upload(key, filepath)
+       expect(list_remote_file.size).to eq(1)
+     end
+   end
+
+   context 'when upload file with a prefix' do
+     let(:prefix) { 'a/prefix/' }
+
+     after do
+       delete_remote_file(prefix + key)
+     end
+
+     it do
+       uploader = described_class.new(bucket, prefix, logger)
+       uploader.upload(key, filepath)
+       expect(list_remote_file.size).to eq(1)
+     end
+   end
+ end
@@ -0,0 +1,98 @@
+ require 'logstash/devutils/rspec/spec_helper'
+ require 'logstash/inputs/qingstor'
+ require_relative './qs_access_helper'
+ require 'tmpdir'
+
+ describe LogStash::Inputs::Qingstor do
+   before do
+     Thread.abort_on_exception = true
+
+     upload_file('../../fixtures/logstash.log', 'log3.log')
+     upload_file('../../fixtures/logstash.log.gz', 'log3.log.gz')
+   end
+
+   after do
+     delete_remote_file 'log3.log'
+     delete_remote_file 'log3.log.gz'
+   end
+
+   let(:config) do
+     { 'access_key_id' => ENV['access_key_id'],
+       'secret_access_key' => ENV['secret_access_key'],
+       'bucket' => ENV['bucket'],
+       'region' => ENV['region'] }
+   end
+
+   let(:key1) { 'log3.log' }
+   let(:key2) { 'log3.log.gz' }
+   let(:backup) { 'evamax' }
+   let(:local_backup_dir) { File.join(Dir.tmpdir, backup) }
+
+   context 'when at the local' do
+     it 'backup to local dir' do
+       fetch_events(config.merge('backup_local_dir' => local_backup_dir))
+       expect(File.exist?(File.join(local_backup_dir, key1))).to be_truthy
+       expect(File.exist?(File.join(local_backup_dir, key2))).to be_truthy
+     end
+
+     after do
+       FileUtils.rm_r(File.join(local_backup_dir, key1))
+       FileUtils.rm_r(File.join(local_backup_dir, key2))
+     end
+   end
+
+   context 'when backup to the remote end' do
+     it do
+       fetch_events(config.merge('backup_bucket' => backup))
+       expect(list_remote_file(backup).size).to eq(2)
+     end
+
+     after do
+       clean_and_delete_bucket(backup)
+     end
+   end
+
+   context 'when test host redirection' do
+     it 'redirect without a port number' do
+       expect { fetch_events(config.merge('host' => 'qingstor.dev')) }
+         .to raise_error(Net::HTTP::Persistent::Error)
+     end
+
+     it 'redirect with a port number' do
+       new_config = config.merge('host' => 'qingstor.dev', 'port' => 444)
+       expect { fetch_events(new_config) }
+         .to raise_error(Net::HTTP::Persistent::Error)
+     end
+   end
+
+   context 'when test with various config values' do
+     it do
+       config['access_key_id'] = 'wrongid'
+       expect { described_class.new(config).register }
+         .to raise_error(LogStash::ConfigurationError)
+     end
+
+     it do
+       config['secret_access_key'] = 'wrongaccesskey'
+       expect { described_class.new(config).register }
+         .to raise_error(LogStash::ConfigurationError)
+     end
+
+     it do
+       config['bucket'] = 'wrongbucket'
+       expect { described_class.new(config).register }
+         .to raise_error(LogStash::ConfigurationError)
+     end
+
+     it do
+       config['region'] = 'wrongregion'
+       expect { described_class.new(config).register }
+         .to raise_error(LogStash::ConfigurationError)
+     end
+
+     it do
+       config.delete('region')
+       expect(described_class.new(config).register).to be_truthy
+     end
+   end
+ end