s3ftp 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- data/Gemfile +3 -0
- data/MIT-LICENSE +21 -0
- data/README.markdown +36 -0
- data/lib/happening/s3/bucket.rb +116 -0
- data/lib/s3ftp.rb +11 -0
- data/lib/s3ftp/driver.rb +245 -0
- metadata +113 -0
data/Gemfile
ADDED
data/MIT-LICENSE
ADDED
@@ -0,0 +1,21 @@
+Copyright (c) 2009 Peter Jones
+Copyright (c) 2009 James Healy
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
data/README.markdown
ADDED
@@ -0,0 +1,36 @@
+# s3ftp
+
+A mini-FTP server that persists all data to Amazon S3.
+
+## Installation
+
+    gem install s3ftp
+
+## Configuration
+
+1. Upload a passwd file to your S3 bucket. It should be a CSV with a single
+   line per user, and look something like this:
+
+       user1,password,y
+       user2,password,n
+
+   The third column indicates the user's administrator status. Administrators can
+   see all files. Regular users are sandboxed to their own directory.
+
+2. Create a config.rb file that looks something like this:
+
+       require 's3ftp'
+
+       AWS_KEY    = 'foo'
+       AWS_SECRET = 'bar'
+       AWS_BUCKET = 'my-ftp-bucket'
+
+       driver     S3FTP::Driver
+       driver_args AWS_KEY, AWS_SECRET, AWS_BUCKET
+
+3. As root, run `em-ftpd config.rb`
+
+## License
+
+This library is distributed under the terms of the MIT License. See the
+included file for more detail.
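
A note on the passwd file described above: the driver parses it with Ruby's stdlib CSV (see extract_users in driver.rb below), so it can be generated with a short script. The following is an illustrative sketch only, not part of the package; uploading the result to the bucket root under the key "passwd" is left to whatever S3 client you prefer:

    require 'csv'

    # each row: username, plaintext password, admin flag ("y" or "n")
    rows = [
      ["user1", "password", "y"],
      ["user2", "password", "n"],
    ]

    # build the CSV text and write it to a local file named "passwd"
    csv = CSV.generate { |out| rows.each { |row| out << row } }
    File.open("passwd", "w") { |f| f.write(csv) }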
data/lib/happening/s3/bucket.rb
ADDED
@@ -0,0 +1,116 @@
+require 'uri'
+require 'cgi'
+
+module Happening
+  module S3
+    class Bucket
+      include Utils
+
+      REQUIRED_FIELDS = [:server]
+      VALID_HEADERS = ['Cache-Control', 'Content-Disposition', 'Content-Encoding', 'Content-Length', 'Content-MD5', 'Content-Type', 'Expect', 'Expires']
+
+      attr_accessor :bucket, :options
+
+      def initialize(bucket, options = {})
+        @options = {
+          :timeout => 10,
+          :server => 's3.amazonaws.com',
+          :protocol => 'https',
+          :aws_access_key_id => nil,
+          :aws_secret_access_key => nil,
+          :retry_count => 4,
+          :permissions => 'private',
+          :ssl => Happening::S3.ssl_options,
+          :prefix => nil,
+          :delimiter => nil
+        }.update(symbolize_keys(options))
+        assert_valid_keys(options, :timeout, :server, :protocol, :aws_access_key_id, :aws_secret_access_key, :retry_count, :permissions, :ssl, :prefix, :delimiter)
+        @bucket = bucket.to_s
+
+        validate
+      end
+
+      def get(request_options = {}, &blk)
+        headers = needs_to_sign? ? aws.sign("GET", path_with_query) : {}
+        request_options[:on_success] = blk if blk
+        request_options.update(:headers => headers)
+        Happening::S3::Request.new(:get, url, {:ssl => options[:ssl]}.update(request_options)).execute
+      end
+
+      def url
+        URI::Generic.new(options[:protocol], nil, server, port, nil, path(!dns_bucket?), nil, query_string, nil).to_s
+      end
+
+      def server
+        dns_bucket? ? "#{bucket}.#{options[:server]}" : options[:server]
+      end
+
+      def path(with_bucket=true)
+        with_bucket ? "/#{bucket}/" : "/"
+      end
+
+      def path_with_query(with_bucket=true)
+        base = path(with_bucket)
+        query = query_string
+        query ? "#{base}?#{query}" : base
+      end
+
+    protected
+
+      def query_string
+        if @options[:prefix] || @options[:delimiter]
+          str = ""
+          str += "prefix=#{CGI::escape(@options[:prefix])}&" if @options[:prefix]
+          str += "delimiter=#{CGI::escape(@options[:delimiter])}&" if @options[:delimiter]
+          str.gsub(/&\Z/,"")
+        else
+          nil
+        end
+      end
+
+      def needs_to_sign?
+        present?(options[:aws_access_key_id])
+      end
+
+      def dns_bucket?
+        # http://docs.amazonwebservices.com/AmazonS3/2006-03-01/index.html?BucketRestrictions.html
+        return false unless (3..63) === bucket.size
+        bucket.split('.').each do |component|
+          return false unless component[/^[a-z0-9]([a-z0-9-]*[a-z0-9])?$/]
+        end
+        true
+      end
+
+      def port
+        (options[:protocol].to_s == 'https') ? 443 : 80
+      end
+
+      def validate
+        raise ArgumentError, "need a bucket name" unless present?(bucket)
+
+        REQUIRED_FIELDS.each do |field|
+          raise ArgumentError, "need field #{field}" unless present?(options[field])
+        end
+
+        raise ArgumentError, "unknown protocol #{options[:protocol]}" unless ['http', 'https'].include?(options[:protocol])
+      end
+
+      def aws
+        @aws ||= Happening::AWS.new(options[:aws_access_key_id], options[:aws_secret_access_key])
+      end
+
+      def construct_aws_headers(http_method, headers = {})
+        unless headers.keys.all?{|header| VALID_HEADERS.include?(header) || header.to_s.match(/\Ax-amz-/) }
+          raise ArgumentError, "invalid headers. All headers must either be one of #{VALID_HEADERS} or start with 'x-amz-'"
+        end
+
+        permissions = options[:permissions] != 'private' ? {'x-amz-acl' => options[:permissions] } : {}
+        headers.update(permissions)
+        headers.update({'url' => path})
+
+        headers = needs_to_sign? ? aws.sign(http_method, path, headers) : headers
+      end
+
+    end
+  end
+end
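
For context, the driver below exercises this class as follows: construct a Bucket with a prefix and delimiter, then issue an asynchronous GET whose callback receives the raw ListBucketResult XML. A minimal sketch, assuming valid AWS credentials, an existing bucket, and a running EventMachine reactor (the bucket name and credentials here are placeholders):

    require 'eventmachine'
    require 's3ftp'   # loads happening plus this bucket extension

    EM.run do
      # list keys under "user1/", grouping subdirectories via the delimiter
      bucket = Happening::S3::Bucket.new('my-ftp-bucket',
        :aws_access_key_id     => 'KEY',
        :aws_secret_access_key => 'SECRET',
        :prefix                => 'user1/',
        :delimiter             => '/')

      bucket.get do |response|
        puts response.response   # the raw ListBucketResult XML document
        EM.stop
      end
    end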
data/lib/s3ftp.rb
ADDED
@@ -0,0 +1,11 @@
+require 'em-ftpd'
+require 'happening'
+require 'csv'
+require 'nokogiri'
+
+# load our happening extension for interacting with buckets. Hopefully
+# it will be accepted upstream soon and can be dropped
+require 'happening/s3/bucket'
+
+# and now the secret sauce
+require 's3ftp/driver'
data/lib/s3ftp/driver.rb
ADDED
@@ -0,0 +1,245 @@
+# coding: utf-8
+
+require 'tempfile'
+
+module S3FTP
+  class Driver
+
+    USER = 0
+    PASS = 1
+    ADMIN = 2
+
+    def initialize(key, secret, bucket)
+      @aws_key, @aws_secret, @aws_bucket = key, secret, bucket
+    end
+
+    def change_dir(path, &block)
+      prefix = scoped_path(path)
+
+      item = Happening::S3::Bucket.new(@aws_bucket, :aws_access_key_id => @aws_key, :aws_secret_access_key => @aws_secret, :prefix => prefix, :delimiter => "/")
+      item.get do |response|
+        yield contains_directory?(response.response, prefix)
+      end
+    end
+
+    def dir_contents(path, &block)
+      prefix = scoped_path_with_trailing_slash(path)
+
+      on_error = Proc.new {|response| yield false }
+      on_success = Proc.new {|response| yield response.response_header["CONTENT_LENGTH"].to_i }
+
+      item = Happening::S3::Bucket.new(@aws_bucket, :aws_access_key_id => @aws_key, :aws_secret_access_key => @aws_secret, :prefix => prefix, :delimiter => "/")
+      item.get do |response|
+        yield parse_bucket_list(response.response)
+      end
+    end
+
+    def authenticate(user, pass, &block)
+      download_passwd_file do |passwd|
+        @users = extract_users(passwd)
+
+        if @users[user] && @users[user][:pass] == pass
+          @user = user
+          yield true
+        else
+          yield false
+        end
+      end
+    end
+
+    def bytes(path, &block)
+      key = scoped_path(path)
+
+      on_error = Proc.new {|response| yield false }
+      on_success = Proc.new {|response| yield response.response_header["CONTENT_LENGTH"].to_i }
+
+      item = Happening::S3::Item.new(@aws_bucket, key, :aws_access_key_id => @aws_key, :aws_secret_access_key => @aws_secret)
+      item.head(:retry_count => 0, :on_success => on_success, :on_error => on_error)
+    end
+
+    def get_file(path, &block)
+      key = scoped_path(path)
+
+      # open a tempfile to store the file as it's downloaded from S3.
+      # em-ftpd will close it for us
+      tmpfile = Tempfile.new("s3ftp")
+
+      on_error = Proc.new {|response| yield false }
+      on_success = Proc.new {|response|
+        tmpfile.flush
+        tmpfile.seek(0)
+        yield tmpfile
+      }
+
+      item = Happening::S3::Item.new(@aws_bucket, key, :aws_access_key_id => @aws_key, :aws_secret_access_key => @aws_secret)
+      item.get(:retry_count => 1, :on_success => on_success, :on_error => on_error).stream do |chunk|
+        tmpfile.write chunk
+      end
+    end
+
+    def put_file(path, tmp_path, &block)
+      key = scoped_path(path)
+
+      bytes = File.size(tmp_path)
+      on_error = Proc.new {|response| yield false }
+      on_success = Proc.new {|response| yield bytes }
+
+      item = Happening::S3::Item.new(@aws_bucket, key, :aws_access_key_id => @aws_key, :aws_secret_access_key => @aws_secret)
+      item.put(File.binread(tmp_path), :retry_count => 0, :on_success => on_success, :on_error => on_error)
+    end
+
+    def delete_file(path, &block)
+      key = scoped_path(path)
+
+      on_error = Proc.new {|response| yield false }
+      on_success = Proc.new {|response| yield true }
+
+      item = Happening::S3::Item.new(@aws_bucket, key, :aws_access_key_id => @aws_key, :aws_secret_access_key => @aws_secret)
+      item.delete(:retry_count => 1, :on_success => on_success, :on_error => on_error)
+    end
+
+    def delete_dir(path, &block)
+      prefix = scoped_path(path)
+
+      on_error = Proc.new {|response| yield false }
+
+      item = Happening::S3::Bucket.new(@aws_bucket, :aws_access_key_id => @aws_key, :aws_secret_access_key => @aws_secret, :prefix => prefix)
+      item.get(:on_error => on_error) do |response|
+        keys = bucket_list_to_full_keys(response.response)
+        delete_object = Proc.new { |key, iter|
+          item = Happening::S3::Item.new(@aws_bucket, key, :aws_access_key_id => @aws_key, :aws_secret_access_key => @aws_secret)
+          item.delete(:retry_count => 1, :on_error => on_error) do |response|
+            iter.next
+          end
+        }
+        on_complete = Proc.new { yield true }
+
+        EM::Iterator.new(keys, 5).each(delete_object, on_complete)
+      end
+    end
+
+    def rename(from, to, &block)
+      source_key = scoped_path(from)
+      source_obj = @aws_bucket + "/" + source_key
+      dest_key = scoped_path(to)
+
+      on_error = Proc.new {|response| yield false }
+      on_success = Proc.new {|response| yield true }
+
+      item = Happening::S3::Item.new(@aws_bucket, dest_key, :aws_access_key_id => @aws_key, :aws_secret_access_key => @aws_secret)
+      item.put(nil, :retry_count => 1, :on_error => on_error, :headers => {"x-amz-copy-source" => source_obj}) do |response|
+        item = Happening::S3::Item.new(@aws_bucket, source_key, :aws_access_key_id => @aws_key, :aws_secret_access_key => @aws_secret)
+        item.delete(:retry_count => 1, :on_success => on_success, :on_error => on_error)
+      end
+    end
+
+    def make_dir(path, &block)
+      key = scoped_path(path) + "/.dir"
+
+      on_error = Proc.new {|response| yield false }
+      on_success = Proc.new {|response| yield true }
+
+      item = Happening::S3::Item.new(@aws_bucket, key, :aws_access_key_id => @aws_key, :aws_secret_access_key => @aws_secret)
+      item.put("", :retry_count => 0, :on_success => on_success, :on_error => on_error)
+    end
+
+    private
+
+    def extract_users(passwd)
+      users = {}
+      CSV.parse(passwd).each { |row|
+        users[row[USER]] = {
+          :pass => row[PASS],
+          :admin => row[ADMIN].to_s.upcase == "Y"
+        }
+      }
+      users
+    end
+
+    def download_passwd_file(&block)
+      on_error = Proc.new { |response|
+        yield false
+      }
+      on_success = Proc.new { |response|
+        yield response.response
+      }
+      item = Happening::S3::Item.new(@aws_bucket, 'passwd', :aws_access_key_id => @aws_key, :aws_secret_access_key => @aws_secret)
+      item.get(:on_success => on_success, :on_error => on_error)
+    end
+
+    def admin?
+      @users[@user] && @users[@user][:admin]
+    end
+
+    def scoped_path_with_trailing_slash(path)
+      path = scoped_path(path)
+      path += "/" if path[-1,1] != "/"
+      path == "/" ? nil : path
+    end
+
+    def scoped_path(path)
+      path = "" if path == "/"
+
+      if admin?
+        File.join("/", path)[1,1024]
+      else
+        File.join("/", @user, path)[1,1024]
+      end
+    end
+
+    def bucket_list_to_full_keys(xml)
+      doc = Nokogiri::XML(xml)
+      doc.remove_namespaces!
+      doc.xpath('//Contents').map { |node|
+        node.xpath('./Key').first.content
+      }
+    end
+
+    def contains_directory?(xml, path)
+      doc = Nokogiri::XML(xml)
+      doc.remove_namespaces!
+      prefix = doc.xpath('/ListBucketResult/Prefix').first.content
+
+      doc.xpath('//CommonPrefixes').any? { |node|
+        name = node.xpath('./Prefix').first.content
+
+        name.to_s.start_with?(prefix)
+      }
+    end
+
+    def parse_bucket_list(xml)
+      doc = Nokogiri::XML(xml)
+      doc.remove_namespaces!
+      prefix = doc.xpath('/ListBucketResult/Prefix').first.content
+      files = doc.xpath('//Contents').select { |node|
+        name = node.xpath('./Key').first.content
+        bytes = node.xpath('./Size').first.content.to_i
+        name != prefix && bytes > 0
+      }.map { |node|
+        name = node.xpath('./Key').first.content
+        bytes = node.xpath('./Size').first.content
+        file_item(name[prefix.size, 1024], bytes)
+      }
+      dirs = doc.xpath('//CommonPrefixes').select { |node|
+        node.xpath('./Prefix').first.content != prefix + "/"
+      }.map { |node|
+        name = node.xpath('./Prefix').first.content
+        dir_item(name[prefix.size, 1024].tr("/",""))
+      }
+      default_dirs + dirs + files
+    end
+
+    def default_dirs
+      [dir_item("."), dir_item("..")]
+    end
+
+    def dir_item(name)
+      EM::FTPD::DirectoryItem.new(:name => name, :directory => true, :size => 0)
+    end
+
+    def file_item(name, bytes)
+      EM::FTPD::DirectoryItem.new(:name => name, :directory => false, :size => bytes)
+    end
+
+  end
+end
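
Every public method on this driver follows em-ftpd's asynchronous convention: rather than returning a value, it yields the result (truthy on success, false on failure) once the S3 round-trip completes. A sketch of driving it directly inside an EventMachine reactor, with placeholder credentials and bucket name:

    require 'eventmachine'
    require 's3ftp'

    EM.run do
      driver = S3FTP::Driver.new('KEY', 'SECRET', 'my-ftp-bucket')

      # downloads the "passwd" object from the bucket, checks the supplied
      # credentials against it, then yields true or false
      driver.authenticate('user1', 'password') do |ok|
        puts(ok ? "logged in" : "bad credentials")
        EM.stop
      end
    end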
metadata
ADDED
@@ -0,0 +1,113 @@
+--- !ruby/object:Gem::Specification
+name: s3ftp
+version: !ruby/object:Gem::Version
+  version: 0.0.1
+  prerelease:
+platform: ruby
+authors:
+- James Healy
+autorequire:
+bindir: bin
+cert_chain: []
+date: 2011-11-18 00:00:00.000000000 Z
+dependencies:
+- !ruby/object:Gem::Dependency
+  name: rake
+  requirement: &21376320 !ruby/object:Gem::Requirement
+    none: false
+    requirements:
+    - - ! '>='
+      - !ruby/object:Gem::Version
+        version: '0'
+  type: :development
+  prerelease: false
+  version_requirements: *21376320
+- !ruby/object:Gem::Dependency
+  name: rspec
+  requirement: &21375560 !ruby/object:Gem::Requirement
+    none: false
+    requirements:
+    - - ~>
+      - !ruby/object:Gem::Version
+        version: '2.6'
+  type: :development
+  prerelease: false
+  version_requirements: *21375560
+- !ruby/object:Gem::Dependency
+  name: em-ftpd
+  requirement: &21374880 !ruby/object:Gem::Requirement
+    none: false
+    requirements:
+    - - ! '>='
+      - !ruby/object:Gem::Version
+        version: '0'
+  type: :runtime
+  prerelease: false
+  version_requirements: *21374880
+- !ruby/object:Gem::Dependency
+  name: happening
+  requirement: &21374100 !ruby/object:Gem::Requirement
+    none: false
+    requirements:
+    - - ! '>='
+      - !ruby/object:Gem::Version
+        version: '0'
+  type: :runtime
+  prerelease: false
+  version_requirements: *21374100
+- !ruby/object:Gem::Dependency
+  name: nokogiri
+  requirement: &21373140 !ruby/object:Gem::Requirement
+    none: false
+    requirements:
+    - - ! '>='
+      - !ruby/object:Gem::Version
+        version: '0'
+  type: :runtime
+  prerelease: false
+  version_requirements: *21373140
+description: Run an FTP server that persists all data to an Amazon S3 bucket
+email:
+- jimmy@deefa.com
+executables: []
+extensions: []
+extra_rdoc_files:
+- README.markdown
+- MIT-LICENSE
+files:
+- lib/happening/s3/bucket.rb
+- lib/s3ftp.rb
+- lib/s3ftp/driver.rb
+- Gemfile
+- README.markdown
+- MIT-LICENSE
+homepage: http://github.com/yob/s3ftp
+licenses: []
+post_install_message:
+rdoc_options:
+- --title
+- S3-FTP Documentation
+- --main
+- README.markdown
+- -q
+require_paths:
+- lib
+required_ruby_version: !ruby/object:Gem::Requirement
+  none: false
+  requirements:
+  - - ! '>='
+    - !ruby/object:Gem::Version
+      version: 1.9.2
+required_rubygems_version: !ruby/object:Gem::Requirement
+  none: false
+  requirements:
+  - - ! '>='
+    - !ruby/object:Gem::Version
+      version: '0'
+requirements: []
+rubyforge_project:
+rubygems_version: 1.8.11
+signing_key:
+specification_version: 3
+summary: An FTP proxy in front of an Amazon S3 bucket
+test_files: []