logstash-output-s3 0.1.0 → 0.1.1

checksums.yaml CHANGED
@@ -1,15 +1,7 @@
 ---
-!binary "U0hBMQ==":
-  metadata.gz: !binary |-
-    YzM5YTk3ZjY1OWU3Mjg2YTMwZTA1NTY1ZWIxMWYyODJkMTcxMWQ5YQ==
-  data.tar.gz: !binary |-
-    NWQxNDkyNWEzNGEyNjE4MDQwZmE0ZTk3NjdhYjI4ZWY5ZWVmZTM2OA==
+SHA1:
+  metadata.gz: 91981ac4b90d4e167ab69cb1567b86aa6563face
+  data.tar.gz: 27a65f27a4bce8bec09b7509d1d83a48d0315b08
 SHA512:
-  metadata.gz: !binary |-
-    YmQ1MjhiYTdjNGExOTMzYjBkYjc2MGRhOGRiODY0YWY0YjBiN2RhMjk2OWI4
-    YjJhMjEwOTk2YzJkNWRhYTYwOWM1MDA0YWI2MjI3NjdjOGYxMjFkNTI1Yzdj
-    MzM3ZGMwNjlkZGY4MzZmMjVhMDE4ZWQxZGVjNDVkYjBlOThmZjQ=
-  data.tar.gz: !binary |-
-    N2M5YTNlYzEyZjQ4MDZjZjExZDg1YjEzZDQ1MzNmNTk1ZWI0NWJlZjJjZTA0
-    NTM5NmY3NzA1MjdlOTU5MDcwZTczZWI5ZDRiZTAxZTdhYzAzZjVlOTYxZDIy
-    ODVlMTAxNjcwYzZkNGRjN2NmZDRjYjY0NzA4ZjRiNDcxNTUyNWQ=
+  metadata.gz: 5118bb281147a135758c5790795b9928d787a384eda6fb75e14ced937257155e5e675f77e618bde8d455900eb4162a15f79e6188469fe97e05fc40dd8b6f67e7
+  data.tar.gz: 6fa3c51b82eb6d95f4f60c74ef6a5d38ef38581f9a5217b5e15739b210c722de5ba61c547dcb40ecf989e8d8fb1694fb75921990e7712069dcb6edc58db15be9
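
The checksum change above is purely one of encoding: the digests used to be emitted as Psych !binary (base64) values, and are now written as plain hex under SHA1/SHA512 keys. A minimal verification sketch in Ruby (not part of the gem; it assumes the .gem archive has already been unpacked with tar, so that metadata.gz and data.tar.gz sit next to checksums.yaml):

    require "digest"
    require "yaml"

    # checksums.yaml maps algorithm name => { archive member => hex digest }
    checksums = YAML.load_file("checksums.yaml")

    %w[metadata.gz data.tar.gz].each do |member|
      expected = checksums["SHA512"][member]
      actual   = Digest::SHA512.file(member).hexdigest
      abort "#{member}: checksum mismatch" unless actual == expected
    end
    puts "all checksums match"
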
data/Gemfile CHANGED
@@ -1,4 +1,3 @@
-source 'http://rubygems.org'
-gem 'rake'
-gem 'gem_publisher'
-gem 'archive-tar-minitar'
+source 'https://rubygems.org'
+gemspec
+gem "logstash", :github => "elasticsearch/logstash", :branch => "1.5"
data/Rakefile CHANGED
@@ -4,3 +4,4 @@ task :default do
   system("rake -T")
 end

+require "logstash/devutils/rake"
data/lib/logstash/outputs/s3.rb CHANGED
@@ -5,50 +5,50 @@ require "socket" # for Socket.gethostname

 # TODO integrate aws_config in the future
 #require "logstash/plugin_mixins/aws_config"
-
+#
 # INFORMATION:
-
+#
 # This plugin was created for store the logstash's events into Amazon Simple Storage Service (Amazon S3).
 # For use it you needs authentications and an s3 bucket.
 # Be careful to have the permission to write file on S3's bucket and run logstash with super user for establish connection.
-
+#
 # S3 plugin allows you to do something complex, let's explain:)
-
+#
 # S3 outputs create temporary files into "/opt/logstash/S3_temp/". If you want, you can change the path at the start of register method.
 # This files have a special name, for example:
-
+#
 # ls.s3.ip-10-228-27-95.2013-04-18T10.00.tag_hello.part0.txt
-
+#
 # ls.s3 : indicate logstash plugin s3
-
+#
 # "ip-10-228-27-95" : indicate you ip machine, if you have more logstash and writing on the same bucket for example.
 # "2013-04-18T10.00" : represents the time whenever you specify time_file.
 # "tag_hello" : this indicate the event's tag, you can collect events with the same tag.
 # "part0" : this means if you indicate size_file then it will generate more parts if you file.size > size_file.
 # When a file is full it will pushed on bucket and will be deleted in temporary directory.
 # If a file is empty is not pushed, but deleted.
-
+#
 # This plugin have a system to restore the previous temporary files if something crash.
-
+#
 ##[Note] :
-
+#
 ## If you specify size_file and time_file then it will create file for each tag (if specified), when time_file or
 ## their size > size_file, it will be triggered then they will be pushed on s3's bucket and will delete from local disk.
-
+#
 ## If you don't specify size_file, but time_file then it will create only one file for each tag (if specified).
 ## When time_file it will be triggered then the files will be pushed on s3's bucket and delete from local disk.
-
+#
 ## If you don't specify time_file, but size_file then it will create files for each tag (if specified),
 ## that will be triggered when their size > size_file, then they will be pushed on s3's bucket and will delete from local disk.
-
+#
 ## If you don't specific size_file and time_file you have a curios mode. It will create only one file for each tag (if specified).
 ## Then the file will be rest on temporary directory and don't will be pushed on bucket until we will restart logstash.
-
+#
 # INFORMATION ABOUT CLASS:
-
+#
 # I tried to comment the class at best i could do.
 # I think there are much thing to improve, but if you want some points to develop here a list:
-
+#
 # TODO Integrate aws_config in the future
 # TODO Find a method to push them all files when logtstash close the session.
 # TODO Integrate @field on the path file
@@ -56,12 +56,12 @@ require "socket" # for Socket.gethostname
 # Use a while or a thread to try the connection before break a time_out and signal an error.
 # TODO If you have bugs report or helpful advice contact me, but remember that this code is much mine as much as yours,
 # try to work on it if you want :)
-
-
+#
+#
 # USAGE:
-
+#
 # This is an example of logstash config:
-
+# [source,ruby]
 # output {
 #    s3{
 #      access_key_id => "crazy_key"             (required)
@@ -74,36 +74,36 @@ require "socket" # for Socket.gethostname
 #      canned_acl => "private"                  (optional. Options are "private", "public_read", "public_read_write", "authenticated_read". Defaults to "private" )
 #    }
 # }
-
+#
 # We analize this:
-
+#
 # access_key_id => "crazy_key"
 # Amazon will give you the key for use their service if you buy it or try it. (not very much open source anyway)
-
+#
 # secret_access_key => "monkey_access_key"
 # Amazon will give you the secret_access_key for use their service if you buy it or try it . (not very much open source anyway).
-
+#
 # endpoint_region => "eu-west-1"
 # When you make a contract with Amazon, you should know where the services you use.
-
+#
 # bucket => "boss_please_open_your_bucket"
 # Be careful you have the permission to write on bucket and know the name.
-
+#
 # size_file => 2048
 # Means the size, in KB, of files who can store on temporary directory before you will be pushed on bucket.
 # Is useful if you have a little server with poor space on disk and you don't want blow up the server with unnecessary temporary log files.
-
+#
 # time_file => 5
 # Means, in minutes, the time before the files will be pushed on bucket. Is useful if you want to push the files every specific time.
-
+#
 # format => "plain"
 # Means the format of events you want to store in the files
-
+#
 # canned_acl => "private"
 # The S3 canned ACL to use when putting the file. Defaults to "private".
-
+#
 # LET'S ROCK AND ROLL ON THE CODE!
-
+#
 class LogStash::Outputs::S3 < LogStash::Outputs::Base
 #TODO integrate aws_config in the future
 #  include LogStash::PluginMixins::AwsConfig
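
The doc comments above describe the temporary file naming scheme, ls.s3.<host>.<time>.tag_<tag>.part<N>.txt. As an illustration only (a hypothetical sketch, not the plugin's actual implementation), such a name can be assembled like this:

    require "socket"

    # Builds e.g. "ls.s3.ip-10-228-27-95.2013-04-18T10.00.tag_hello.part0.txt"
    def s3_temp_filename(tag, part = 0, time = Time.now)
      host  = Socket.gethostname               # e.g. "ip-10-228-27-95"
      stamp = time.strftime("%Y-%m-%dT%H.%M")  # minute granularity, matching time_file
      "ls.s3.#{host}.#{stamp}.tag_#{tag}.part#{part}.txt"
    end

    puts s3_temp_filename("hello")
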
data/logstash-output-s3.gemspec CHANGED
@@ -1,13 +1,13 @@
 Gem::Specification.new do |s|

   s.name = 'logstash-output-s3'
-  s.version = '0.1.0'
+  s.version = '0.1.1'
   s.licenses = ['Apache License (2.0)']
   s.summary = "This plugin was created for store the logstash's events into Amazon Simple Storage Service (Amazon S3)"
-  s.description = "This plugin was created for store the logstash's events into Amazon Simple Storage Service (Amazon S3)"
+  s.description = "This gem is a logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/plugin install gemname. This gem is not a stand-alone program"
   s.authors = ["Elasticsearch"]
-  s.email = 'richard.pijnenburg@elasticsearch.com'
-  s.homepage = "http://logstash.net/"
+  s.email = 'info@elasticsearch.com'
+  s.homepage = "http://www.elasticsearch.org/guide/en/logstash/current/index.html"
   s.require_paths = ["lib"]

   # Files
@@ -17,12 +17,12 @@ Gem::Specification.new do |s|
   s.test_files = s.files.grep(%r{^(test|spec|features)/})

   # Special flag to let us know this is actually a logstash plugin
-  s.metadata = { "logstash_plugin" => "true", "group" => "output" }
+  s.metadata = { "logstash_plugin" => "true", "logstash_group" => "output" }

   # Gem dependencies
   s.add_runtime_dependency 'logstash', '>= 1.4.0', '< 2.0.0'
-
+  s.add_runtime_dependency 'logstash-mixin-aws'
   s.add_runtime_dependency 'aws-sdk'
-
+  s.add_development_dependency 'logstash-devutils'
 end

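
The metadata rename from "group" to "logstash_group" matters because plugin discovery goes through gemspec metadata. A sketch of the mechanism via plain RubyGems (an assumption for illustration, not Logstash's actual lookup code):

    require "rubygems"

    # Gem::Specification enumerates the installed gem specs.
    output_plugins = Gem::Specification.select do |spec|
      spec.metadata["logstash_plugin"] == "true" &&
        spec.metadata["logstash_group"] == "output"
    end

    output_plugins.each { |spec| puts "#{spec.name} #{spec.version}" }
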
data/spec/outputs/s3_spec.rb CHANGED
@@ -1 +1,6 @@
-require 'spec_helper'
+# encoding: utf-8
+require "logstash/devutils/rspec/spec_helper"
+require 'logstash/outputs/s3'
+
+describe LogStash::Outputs::S3 do
+end
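
The rewritten spec file is only a scaffold; the describe block is empty. A hypothetical first example (not part of this release; option names are taken from the config documented in s3.rb) could look like:

    # encoding: utf-8
    require "logstash/devutils/rspec/spec_helper"
    require "logstash/outputs/s3"

    describe LogStash::Outputs::S3 do
      it "instantiates with the documented required options" do
        plugin = described_class.new(
          "access_key_id"     => "crazy_key",
          "secret_access_key" => "monkey_access_key",
          "endpoint_region"   => "eu-west-1",
          "bucket"            => "my-bucket")
        expect(plugin).to be_a(described_class)
      end
    end
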
metadata CHANGED
@@ -1,52 +1,79 @@
 --- !ruby/object:Gem::Specification
 name: logstash-output-s3
 version: !ruby/object:Gem::Version
-  version: 0.1.0
+  version: 0.1.1
 platform: ruby
 authors:
 - Elasticsearch
-autorequire:
+autorequire:
 bindir: bin
 cert_chain: []
-date: 2014-11-06 00:00:00.000000000 Z
+date: 2014-11-19 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
-  name: logstash
   requirement: !ruby/object:Gem::Requirement
     requirements:
-    - - ! '>='
+    - - '>='
       - !ruby/object:Gem::Version
         version: 1.4.0
     - - <
       - !ruby/object:Gem::Version
         version: 2.0.0
-  type: :runtime
+  name: logstash
   prerelease: false
+  type: :runtime
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
-    - - ! '>='
+    - - '>='
      - !ruby/object:Gem::Version
        version: 1.4.0
    - - <
      - !ruby/object:Gem::Version
        version: 2.0.0
 - !ruby/object:Gem::Dependency
-  name: aws-sdk
   requirement: !ruby/object:Gem::Requirement
     requirements:
-    - - ! '>='
+    - - '>='
       - !ruby/object:Gem::Version
         version: '0'
+  name: logstash-mixin-aws
+  prerelease: false
   type: :runtime
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - '>='
+      - !ruby/object:Gem::Version
+        version: '0'
+- !ruby/object:Gem::Dependency
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - '>='
+      - !ruby/object:Gem::Version
+        version: '0'
+  name: aws-sdk
+  prerelease: false
+  type: :runtime
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - '>='
+      - !ruby/object:Gem::Version
+        version: '0'
+- !ruby/object:Gem::Dependency
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - '>='
+      - !ruby/object:Gem::Version
+        version: '0'
+  name: logstash-devutils
   prerelease: false
+  type: :development
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
-    - - ! '>='
+    - - '>='
       - !ruby/object:Gem::Version
         version: '0'
-description: This plugin was created for store the logstash's events into Amazon Simple
-  Storage Service (Amazon S3)
-email: richard.pijnenburg@elasticsearch.com
+description: This gem is a logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/plugin install gemname. This gem is not a stand-alone program
+email: info@elasticsearch.com
 executables: []
 extensions: []
 extra_rdoc_files: []
@@ -57,35 +84,32 @@ files:
 - Rakefile
 - lib/logstash/outputs/s3.rb
 - logstash-output-s3.gemspec
-- rakelib/publish.rake
-- rakelib/vendor.rake
 - spec/outputs/s3_spec.rb
-homepage: http://logstash.net/
+homepage: http://www.elasticsearch.org/guide/en/logstash/current/index.html
 licenses:
 - Apache License (2.0)
 metadata:
   logstash_plugin: 'true'
-  group: output
-post_install_message:
+  logstash_group: output
+post_install_message:
 rdoc_options: []
 require_paths:
 - lib
 required_ruby_version: !ruby/object:Gem::Requirement
   requirements:
-  - - ! '>='
+  - - '>='
    - !ruby/object:Gem::Version
      version: '0'
 required_rubygems_version: !ruby/object:Gem::Requirement
   requirements:
-  - - ! '>='
+  - - '>='
    - !ruby/object:Gem::Version
      version: '0'
 requirements: []
-rubyforge_project:
-rubygems_version: 2.4.1
-signing_key:
+rubyforge_project:
+rubygems_version: 2.1.9
+signing_key:
 specification_version: 4
-summary: This plugin was created for store the logstash's events into Amazon Simple
-  Storage Service (Amazon S3)
+summary: This plugin was created for store the logstash's events into Amazon Simple Storage Service (Amazon S3)
 test_files:
 - spec/outputs/s3_spec.rb
data/rakelib/publish.rake DELETED
@@ -1,9 +0,0 @@
-require "gem_publisher"
-
-desc "Publish gem to RubyGems.org"
-task :publish_gem do |t|
-  gem_file = Dir.glob(File.expand_path('../*.gemspec',File.dirname(__FILE__))).first
-  gem = GemPublisher.publish_if_updated(gem_file, :rubygems)
-  puts "Published #{gem}" if gem
-end
-
data/rakelib/vendor.rake DELETED
@@ -1,169 +0,0 @@
-require "net/http"
-require "uri"
-require "digest/sha1"
-
-def vendor(*args)
-  return File.join("vendor", *args)
-end
-
-directory "vendor/" => ["vendor"] do |task, args|
-  mkdir task.name
-end
-
-def fetch(url, sha1, output)
-
-  puts "Downloading #{url}"
-  actual_sha1 = download(url, output)
-
-  if actual_sha1 != sha1
-    fail "SHA1 does not match (expected '#{sha1}' but got '#{actual_sha1}')"
-  end
-end # def fetch
-
-def file_fetch(url, sha1)
-  filename = File.basename( URI(url).path )
-  output = "vendor/#{filename}"
-  task output => [ "vendor/" ] do
-    begin
-      actual_sha1 = file_sha1(output)
-      if actual_sha1 != sha1
-        fetch(url, sha1, output)
-      end
-    rescue Errno::ENOENT
-      fetch(url, sha1, output)
-    end
-  end.invoke
-
-  return output
-end
-
-def file_sha1(path)
-  digest = Digest::SHA1.new
-  fd = File.new(path, "r")
-  while true
-    begin
-      digest << fd.sysread(16384)
-    rescue EOFError
-      break
-    end
-  end
-  return digest.hexdigest
-ensure
-  fd.close if fd
-end
-
-def download(url, output)
-  uri = URI(url)
-  digest = Digest::SHA1.new
-  tmp = "#{output}.tmp"
-  Net::HTTP.start(uri.host, uri.port, :use_ssl => (uri.scheme == "https")) do |http|
-    request = Net::HTTP::Get.new(uri.path)
-    http.request(request) do |response|
-      fail "HTTP fetch failed for #{url}. #{response}" if [200, 301].include?(response.code)
-      size = (response["content-length"].to_i || -1).to_f
-      count = 0
-      File.open(tmp, "w") do |fd|
-        response.read_body do |chunk|
-          fd.write(chunk)
-          digest << chunk
-          if size > 0 && $stdout.tty?
-            count += chunk.bytesize
-            $stdout.write(sprintf("\r%0.2f%%", count/size * 100))
-          end
-        end
-      end
-      $stdout.write("\r      \r") if $stdout.tty?
-    end
-  end
-
-  File.rename(tmp, output)
-
-  return digest.hexdigest
-rescue SocketError => e
-  puts "Failure while downloading #{url}: #{e}"
-  raise
-ensure
-  File.unlink(tmp) if File.exist?(tmp)
-end # def download
-
-def untar(tarball, &block)
-  require "archive/tar/minitar"
-  tgz = Zlib::GzipReader.new(File.open(tarball))
-  # Pull out typesdb
-  tar = Archive::Tar::Minitar::Input.open(tgz)
-  tar.each do |entry|
-    path = block.call(entry)
-    next if path.nil?
-    parent = File.dirname(path)
-
-    mkdir_p parent unless File.directory?(parent)
-
-    # Skip this file if the output file is the same size
-    if entry.directory?
-      mkdir path unless File.directory?(path)
-    else
-      entry_mode = entry.instance_eval { @mode } & 0777
-      if File.exists?(path)
-        stat = File.stat(path)
-        # TODO(sissel): Submit a patch to archive-tar-minitar upstream to
-        # expose headers in the entry.
-        entry_size = entry.instance_eval { @size }
-        # If file sizes are same, skip writing.
-        next if stat.size == entry_size && (stat.mode & 0777) == entry_mode
-      end
-      puts "Extracting #{entry.full_name} from #{tarball} #{entry_mode.to_s(8)}"
-      File.open(path, "w") do |fd|
-        # eof? check lets us skip empty files. Necessary because the API provided by
-        # Archive::Tar::Minitar::Reader::EntryStream only mostly acts like an
-        # IO object. Something about empty files in this EntryStream causes
-        # IO.copy_stream to throw "can't convert nil into String" on JRuby
-        # TODO(sissel): File a bug about this.
-        while !entry.eof?
-          chunk = entry.read(16384)
-          fd.write(chunk)
-        end
-        #IO.copy_stream(entry, fd)
-      end
-      File.chmod(entry_mode, path)
-    end
-  end
-  tar.close
-  File.unlink(tarball) if File.file?(tarball)
-end # def untar
-
-def ungz(file)
-
-  outpath = file.gsub('.gz', '')
-  tgz = Zlib::GzipReader.new(File.open(file))
-  begin
-    File.open(outpath, "w") do |out|
-      IO::copy_stream(tgz, out)
-    end
-    File.unlink(file)
-  rescue
-    File.unlink(outpath) if File.file?(outpath)
-    raise
-  end
-  tgz.close
-end
-
-desc "Process any vendor files required for this plugin"
-task "vendor" do |task, args|
-
-  @files.each do |file|
-    download = file_fetch(file['url'], file['sha1'])
-    if download =~ /.tar.gz/
-      prefix = download.gsub('.tar.gz', '').gsub('vendor/', '')
-      untar(download) do |entry|
-        if !file['files'].nil?
-          next unless file['files'].include?(entry.full_name.gsub(prefix, ''))
-          out = entry.full_name.split("/").last
-        end
-        File.join('vendor', out)
-      end
-    elsif download =~ /.gz/
-      ungz(download)
-    end
-  end
-
-end