logstash-filter-date 0.1.0 → 0.1.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +5 -13
- data/Gemfile +3 -3
- data/Rakefile +1 -0
- data/lib/logstash/filters/date.rb +18 -18
- data/logstash-filter-date.gemspec +6 -5
- data/spec/filters/date_spec.rb +1 -1
- metadata +47 -36
- data/rakelib/publish.rake +0 -9
- data/rakelib/vendor.rake +0 -169
checksums.yaml
CHANGED
@@ -1,15 +1,7 @@
 ---
-
-metadata.gz:
-
-data.tar.gz: !binary |-
-ZTkyNDQxOTk3OWExNmU0ZGIxZDRlMjVjODgxODNhYzgzOTAwNGMzOQ==
+SHA1:
+metadata.gz: 331f7996273736e423187c426637944c0fc3c85c
+data.tar.gz: bb17b335910d90219831c565f34de107f0a16f56
 SHA512:
-metadata.gz:
-
-NTVhNTg3MjY2YjdlNDI4YjY1YWI5Y2Y1OGUwYTY0NzU5OWMwZmFiMjNjMDEy
-YzI4MzEwZWNhYTEyYTFmOTllMThiZmRhYjU3MTg2MjYzMDY3MWU=
-data.tar.gz: !binary |-
-N2MwNGI4Zjk0ZjFjOGNkZWZkZDhhOGNjMDdhZTE4OTcwM2ZiY2I5Nzk2ZDky
-OGRkMTgwNDQ4ZTQ2MjQ3ZWZhYWE5Mjg1NTVmZjVhNWZlMGMxNTVlMmM5MzFk
-ZTFmZGQ5NGE4ZTI2NmRkMDAwYmY4ZDExMmUwYTk1ZWFiMzZiYTc=
+metadata.gz: 62e65449fe75ae789c79e637f91f600a6be2a2d1f474252c5e536be7d9438635569046c97ae495c146fa1477813f1c1538ef9bd870b9ca9f5953c29274fef1f1
+data.tar.gz: da8ba2fe277640599556a2c4b9d3c627fc19af96190b5b6337c46126854008229c1919f87e1881307a946e9a2c8143d9d2cd5c1db2ae6886b90a4ec53c389a8f
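The 0.1.1 package stores plain hex SHA1 and SHA512 digests in checksums.yaml instead of the older base64-encoded `!binary` values. As a minimal sketch (not part of the package), the new hex values can be reproduced with Ruby's standard Digest library, assuming the extracted metadata.gz and data.tar.gz sit next to checksums.yaml in the current directory:

    require "digest"
    require "yaml"

    checksums = YAML.load_file("checksums.yaml")
    %w[metadata.gz data.tar.gz].each do |name|
      expected = checksums["SHA512"][name]
      actual   = Digest::SHA512.file(name).hexdigest
      puts "#{name}: #{actual == expected ? 'ok' : 'MISMATCH'}"
    end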
data/Gemfile
CHANGED
@@ -1,3 +1,3 @@
-source '
-
-gem
+source 'https://rubygems.org'
+gemspec
+gem "logstash", :github => "elasticsearch/logstash", :branch => "1.5"
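The new Gemfile defers the gem's own dependencies to the gemspec (via `gemspec`) and pins Logstash core to the 1.5 development branch. As a rough illustration (not part of the diff), Bundler's `:github` shorthand used here is equivalent to spelling out the git source explicitly; the exact URL scheme Bundler generates depends on the Bundler version:

    # Illustrative expansion of the :github shorthand in the new Gemfile
    gem "logstash", :git => "https://github.com/elasticsearch/logstash.git", :branch => "1.5"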
data/Rakefile
CHANGED

data/lib/logstash/filters/date.rb
CHANGED
@@ -7,10 +7,10 @@ require "logstash/timestamp"
 # date or timestamp as the logstash timestamp for the event.
 #
 # For example, syslog events usually have timestamps like this:
-#
+# [source,ruby]
 # "Apr 17 09:32:01"
 #
-# You would use the date format
+# You would use the date format `MMM dd HH:mm:ss` to parse this.
 #
 # The date filter is especially important for sorting events and for
 # backfilling old data. If you don't get the date correct in your
@@ -30,7 +30,7 @@ class LogStash::Filters::Date < LogStash::Filters::Base
   milestone 3

   # Specify a time zone canonical ID to be used for date parsing.
-  # The valid IDs are listed on the [Joda.org available time zones page]
+  # The valid IDs are listed on the http://joda-time.sourceforge.net/timezones.html[Joda.org available time zones page].
   # This is useful in case the time zone cannot be extracted from the value,
   # and is not the platform default.
   # If this is not specified the platform default will be used.
@@ -42,38 +42,38 @@ class LogStash::Filters::Date < LogStash::Filters::Base
   # Simple examples are `en`,`en-US` for BCP47 or `en_US` for POSIX.
   # If not specified, the platform default will be used.
   #
-  # The locale is mostly necessary to be set for parsing month names (pattern with MMM) and
-  # weekday names (pattern with EEE).
+  # The locale is mostly necessary to be set for parsing month names (pattern with `MMM`) and
+  # weekday names (pattern with `EEE`).
   #
   config :locale, :validate => :string

   # The date formats allowed are anything allowed by Joda-Time (java time
   # library). You can see the docs for this format here:
   #
-  #
+  # http://joda-time.sourceforge.net/apidocs/org/joda/time/format/DateTimeFormat.html[joda.time.format.DateTimeFormat]
   #
   # An array with field name first, and format patterns following, `[ field,
   # formats... ]`
   #
   # If your time field has multiple possible formats, you can do this:
-  #
+  # [source,ruby]
   # match => [ "logdate", "MMM dd YYY HH:mm:ss",
   # "MMM d YYY HH:mm:ss", "ISO8601" ]
   #
-  # The above will match a syslog (rfc3164) or iso8601 timestamp.
+  # The above will match a syslog (rfc3164) or `iso8601` timestamp.
   #
   # There are a few special exceptions. The following format literals exist
   # to help you save time and ensure correctness of date parsing.
   #
-  # *
-  # 2011-04-19T03:44:01.103Z
-  # *
-  # *
-  # *
-  #
-  # For example, if you have a field 'logdate', with a value that looks like
-  # 'Aug 13 2010 00:03:44', you would use this configuration:
+  # * `ISO8601` - should parse any valid ISO8601 timestamp, such as
+  # `2011-04-19T03:44:01.103Z`
+  # * `UNIX` - will parse unix time in seconds since epoch
+  # * `UNIX_MS` - will parse unix time in milliseconds since epoch
+  # * `TAI64N` - will parse tai64n time values
   #
+  # For example, if you have a field `logdate`, with a value that looks like
+  # `Aug 13 2010 00:03:44`, you would use this configuration:
+  # [source,ruby]
   # filter {
   # date {
   # match => [ "logdate", "MMM dd YYYY HH:mm:ss" ]
@@ -81,12 +81,12 @@ class LogStash::Filters::Date < LogStash::Filters::Base
   # }
   #
   # If your field is nested in your structure, you can use the nested
-  # syntax [foo][bar] to match its value. For more information, please refer to
+  # syntax `[foo][bar]` to match its value. For more information, please refer to
   # http://logstash.net/docs/latest/configuration#fieldreferences
   config :match, :validate => :array, :default => []

   # Store the matching timestamp into the given target field. If not provided,
-  # default to updating the
+  # default to updating the `@timestamp` field of the event.
   config :target, :validate => :string, :default => "@timestamp"

   # LOGSTASH-34
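The documentation changes above convert inline config snippets to asciidoc `[source,ruby]` blocks, wrap option literals in backticks, and spell out the special format literals (`ISO8601`, `UNIX`, `UNIX_MS`, `TAI64N`). Pulling the documented options together, a filter block using this plugin might look like the following sketch (the field name and values are illustrative, not taken from the diff):

    filter {
      date {
        match    => [ "logdate", "MMM dd YYYY HH:mm:ss", "ISO8601" ]
        target   => "@timestamp"   # the default target shown in the diff
        timezone => "UTC"          # a Joda-Time canonical time zone ID
        locale   => "en"           # needed when month (MMM) or weekday (EEE) names are parsed
      }
    }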
data/logstash-filter-date.gemspec
CHANGED
@@ -1,13 +1,13 @@
 Gem::Specification.new do |s|

   s.name = 'logstash-filter-date'
-  s.version = '0.1.
+  s.version = '0.1.1'
   s.licenses = ['Apache License (2.0)']
   s.summary = "The date filter is used for parsing dates from fields, and then using that date or timestamp as the logstash timestamp for the event."
-  s.description = "
+  s.description = "This gem is a logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/plugin install gemname. This gem is not a stand-alone program"
   s.authors = ["Elasticsearch"]
-  s.email = '
-  s.homepage = "http://logstash.
+  s.email = 'info@elasticsearch.com'
+  s.homepage = "http://www.elasticsearch.org/guide/en/logstash/current/index.html"
   s.require_paths = ["lib"]

   # Files
@@ -17,12 +17,13 @@ Gem::Specification.new do |s|
   s.test_files = s.files.grep(%r{^(test|spec|features)/})

   # Special flag to let us know this is actually a logstash plugin
-  s.metadata = { "logstash_plugin" => "true", "
+  s.metadata = { "logstash_plugin" => "true", "logstash_group" => "filter" }

   # Gem dependencies
   s.add_runtime_dependency 'logstash', '>= 1.4.0', '< 2.0.0'
   s.add_runtime_dependency 'logstash-input-generator'
   s.add_runtime_dependency 'logstash-codec-json'
   s.add_runtime_dependency 'logstash-output-null'
+  s.add_development_dependency 'logstash-devutils'
 end

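Beyond the version bump and contact details, the gemspec now fills in the `logstash_group` metadata key alongside `logstash_plugin` and adds `logstash-devutils` as a development dependency. As the new description notes, the gem is meant to be installed on top of the Logstash core pipeline using `$LS_HOME/bin/plugin install gemname` rather than run stand-alone. A small sketch, not part of the gem, of reading those metadata flags back out of the spec:

    # Illustrative only: inspect the plugin metadata declared above.
    spec = Gem::Specification.load("logstash-filter-date.gemspec")
    puts spec.metadata["logstash_plugin"]  # => "true"
    puts spec.metadata["logstash_group"]   # => "filter"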
data/spec/filters/date_spec.rb
CHANGED
metadata
CHANGED
@@ -1,79 +1,93 @@
 --- !ruby/object:Gem::Specification
 name: logstash-filter-date
 version: !ruby/object:Gem::Version
-version: 0.1.
+version: 0.1.1
 platform: ruby
 authors:
 - Elasticsearch
-autorequire:
+autorequire:
 bindir: bin
 cert_chain: []
-date: 2014-11-
+date: 2014-11-19 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
 name: logstash
-
+version_requirements: !ruby/object:Gem::Requirement
 requirements:
-- -
+- - '>='
 - !ruby/object:Gem::Version
 version: 1.4.0
 - - <
 - !ruby/object:Gem::Version
 version: 2.0.0
-
-prerelease: false
-version_requirements: !ruby/object:Gem::Requirement
+requirement: !ruby/object:Gem::Requirement
 requirements:
-- -
+- - '>='
 - !ruby/object:Gem::Version
 version: 1.4.0
 - - <
 - !ruby/object:Gem::Version
 version: 2.0.0
+prerelease: false
+type: :runtime
 - !ruby/object:Gem::Dependency
 name: logstash-input-generator
+version_requirements: !ruby/object:Gem::Requirement
+requirements:
+- - '>='
+- !ruby/object:Gem::Version
+version: '0'
 requirement: !ruby/object:Gem::Requirement
 requirements:
-- -
+- - '>='
 - !ruby/object:Gem::Version
 version: '0'
-type: :runtime
 prerelease: false
+type: :runtime
+- !ruby/object:Gem::Dependency
+name: logstash-codec-json
 version_requirements: !ruby/object:Gem::Requirement
 requirements:
-- -
+- - '>='
 - !ruby/object:Gem::Version
 version: '0'
-- !ruby/object:Gem::Dependency
-name: logstash-codec-json
 requirement: !ruby/object:Gem::Requirement
 requirements:
-- -
+- - '>='
 - !ruby/object:Gem::Version
 version: '0'
-type: :runtime
 prerelease: false
+type: :runtime
+- !ruby/object:Gem::Dependency
+name: logstash-output-null
 version_requirements: !ruby/object:Gem::Requirement
 requirements:
-- -
+- - '>='
 - !ruby/object:Gem::Version
 version: '0'
-- !ruby/object:Gem::Dependency
-name: logstash-output-null
 requirement: !ruby/object:Gem::Requirement
 requirements:
-- -
+- - '>='
 - !ruby/object:Gem::Version
 version: '0'
-type: :runtime
 prerelease: false
+type: :runtime
+- !ruby/object:Gem::Dependency
+name: logstash-devutils
 version_requirements: !ruby/object:Gem::Requirement
 requirements:
-- -
+- - '>='
+- !ruby/object:Gem::Version
+version: '0'
+requirement: !ruby/object:Gem::Requirement
+requirements:
+- - '>='
 - !ruby/object:Gem::Version
 version: '0'
-
-
+prerelease: false
+type: :development
+description: This gem is a logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/plugin install gemname. This gem is not a stand-alone program
+email: info@elasticsearch.com
 executables: []
 extensions: []
 extra_rdoc_files: []
@@ -84,35 +98,32 @@ files:
 - Rakefile
 - lib/logstash/filters/date.rb
 - logstash-filter-date.gemspec
-- rakelib/publish.rake
-- rakelib/vendor.rake
 - spec/filters/date_spec.rb
-homepage: http://logstash.
+homepage: http://www.elasticsearch.org/guide/en/logstash/current/index.html
 licenses:
 - Apache License (2.0)
 metadata:
 logstash_plugin: 'true'
-
-post_install_message:
+logstash_group: filter
+post_install_message:
 rdoc_options: []
 require_paths:
 - lib
 required_ruby_version: !ruby/object:Gem::Requirement
 requirements:
-- -
+- - '>='
 - !ruby/object:Gem::Version
 version: '0'
 required_rubygems_version: !ruby/object:Gem::Requirement
 requirements:
-- -
+- - '>='
 - !ruby/object:Gem::Version
 version: '0'
 requirements: []
-rubyforge_project:
-rubygems_version: 2.4.
-signing_key:
+rubyforge_project:
+rubygems_version: 2.4.4
+signing_key:
 specification_version: 4
-summary: The date filter is used for parsing dates from fields, and then using that
-date or timestamp as the logstash timestamp for the event.
+summary: The date filter is used for parsing dates from fields, and then using that date or timestamp as the logstash timestamp for the event.
 test_files:
 - spec/filters/date_spec.rb
data/rakelib/publish.rake
DELETED
@@ -1,9 +0,0 @@
-require "gem_publisher"
-
-desc "Publish gem to RubyGems.org"
-task :publish_gem do |t|
-  gem_file = Dir.glob(File.expand_path('../*.gemspec',File.dirname(__FILE__))).first
-  gem = GemPublisher.publish_if_updated(gem_file, :rubygems)
-  puts "Published #{gem}" if gem
-end
-
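The deleted publish task was a thin wrapper around the gem_publisher gem: it located the gemspec one directory above rakelib/ and pushed to RubyGems.org when an updated version was detected (per `publish_if_updated`). Assuming gem_publisher was available, it would have been driven from the plugin root roughly like this:

    # Illustrative invocation of the removed task (rake auto-loads rakelib/*.rake):
    #   bundle exec rake publish_gem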
data/rakelib/vendor.rake
DELETED
@@ -1,169 +0,0 @@
-require "net/http"
-require "uri"
-require "digest/sha1"
-
-def vendor(*args)
-  return File.join("vendor", *args)
-end
-
-directory "vendor/" => ["vendor"] do |task, args|
-  mkdir task.name
-end
-
-def fetch(url, sha1, output)
-
-  puts "Downloading #{url}"
-  actual_sha1 = download(url, output)
-
-  if actual_sha1 != sha1
-    fail "SHA1 does not match (expected '#{sha1}' but got '#{actual_sha1}')"
-  end
-end # def fetch
-
-def file_fetch(url, sha1)
-  filename = File.basename( URI(url).path )
-  output = "vendor/#{filename}"
-  task output => [ "vendor/" ] do
-    begin
-      actual_sha1 = file_sha1(output)
-      if actual_sha1 != sha1
-        fetch(url, sha1, output)
-      end
-    rescue Errno::ENOENT
-      fetch(url, sha1, output)
-    end
-  end.invoke
-
-  return output
-end
-
-def file_sha1(path)
-  digest = Digest::SHA1.new
-  fd = File.new(path, "r")
-  while true
-    begin
-      digest << fd.sysread(16384)
-    rescue EOFError
-      break
-    end
-  end
-  return digest.hexdigest
-ensure
-  fd.close if fd
-end
-
-def download(url, output)
-  uri = URI(url)
-  digest = Digest::SHA1.new
-  tmp = "#{output}.tmp"
-  Net::HTTP.start(uri.host, uri.port, :use_ssl => (uri.scheme == "https")) do |http|
-    request = Net::HTTP::Get.new(uri.path)
-    http.request(request) do |response|
-      fail "HTTP fetch failed for #{url}. #{response}" if [200, 301].include?(response.code)
-      size = (response["content-length"].to_i || -1).to_f
-      count = 0
-      File.open(tmp, "w") do |fd|
-        response.read_body do |chunk|
-          fd.write(chunk)
-          digest << chunk
-          if size > 0 && $stdout.tty?
-            count += chunk.bytesize
-            $stdout.write(sprintf("\r%0.2f%%", count/size * 100))
-          end
-        end
-      end
-      $stdout.write("\r \r") if $stdout.tty?
-    end
-  end
-
-  File.rename(tmp, output)
-
-  return digest.hexdigest
-rescue SocketError => e
-  puts "Failure while downloading #{url}: #{e}"
-  raise
-ensure
-  File.unlink(tmp) if File.exist?(tmp)
-end # def download
-
-def untar(tarball, &block)
-  require "archive/tar/minitar"
-  tgz = Zlib::GzipReader.new(File.open(tarball))
-  # Pull out typesdb
-  tar = Archive::Tar::Minitar::Input.open(tgz)
-  tar.each do |entry|
-    path = block.call(entry)
-    next if path.nil?
-    parent = File.dirname(path)
-
-    mkdir_p parent unless File.directory?(parent)
-
-    # Skip this file if the output file is the same size
-    if entry.directory?
-      mkdir path unless File.directory?(path)
-    else
-      entry_mode = entry.instance_eval { @mode } & 0777
-      if File.exists?(path)
-        stat = File.stat(path)
-        # TODO(sissel): Submit a patch to archive-tar-minitar upstream to
-        # expose headers in the entry.
-        entry_size = entry.instance_eval { @size }
-        # If file sizes are same, skip writing.
-        next if stat.size == entry_size && (stat.mode & 0777) == entry_mode
-      end
-      puts "Extracting #{entry.full_name} from #{tarball} #{entry_mode.to_s(8)}"
-      File.open(path, "w") do |fd|
-        # eof? check lets us skip empty files. Necessary because the API provided by
-        # Archive::Tar::Minitar::Reader::EntryStream only mostly acts like an
-        # IO object. Something about empty files in this EntryStream causes
-        # IO.copy_stream to throw "can't convert nil into String" on JRuby
-        # TODO(sissel): File a bug about this.
-        while !entry.eof?
-          chunk = entry.read(16384)
-          fd.write(chunk)
-        end
-        #IO.copy_stream(entry, fd)
-      end
-      File.chmod(entry_mode, path)
-    end
-  end
-  tar.close
-  File.unlink(tarball) if File.file?(tarball)
-end # def untar
-
-def ungz(file)
-
-  outpath = file.gsub('.gz', '')
-  tgz = Zlib::GzipReader.new(File.open(file))
-  begin
-    File.open(outpath, "w") do |out|
-      IO::copy_stream(tgz, out)
-    end
-    File.unlink(file)
-  rescue
-    File.unlink(outpath) if File.file?(outpath)
-    raise
-  end
-  tgz.close
-end
-
-desc "Process any vendor files required for this plugin"
-task "vendor" do |task, args|
-
-  @files.each do |file|
-    download = file_fetch(file['url'], file['sha1'])
-    if download =~ /.tar.gz/
-      prefix = download.gsub('.tar.gz', '').gsub('vendor/', '')
-      untar(download) do |entry|
-        if !file['files'].nil?
-          next unless file['files'].include?(entry.full_name.gsub(prefix, ''))
-          out = entry.full_name.split("/").last
-        end
-        File.join('vendor', out)
-      end
-    elsif download =~ /.gz/
-      ungz(download)
-    end
-  end
-
-end