s3file 0.1.0

Sign up to get free protection for your applications and to get access to all the features.
data/.document ADDED
@@ -0,0 +1,5 @@
1
+ README.rdoc
2
+ lib/**/*.rb
3
+ bin/*
4
+ features/**/*.feature
5
+ LICENSE
data/.gitignore ADDED
@@ -0,0 +1,21 @@
1
+ ## MAC OS
2
+ .DS_Store
3
+
4
+ ## TEXTMATE
5
+ *.tmproj
6
+ tmtags
7
+
8
+ ## EMACS
9
+ *~
10
+ \#*
11
+ .\#*
12
+
13
+ ## VIM
14
+ *.swp
15
+
16
+ ## PROJECT::GENERAL
17
+ coverage
18
+ rdoc
19
+ pkg
20
+
21
+ ## PROJECT::SPECIFIC
data/LICENSE ADDED
@@ -0,0 +1,20 @@
1
+ Copyright (c) 2009 glen
2
+
3
+ Permission is hereby granted, free of charge, to any person obtaining
4
+ a copy of this software and associated documentation files (the
5
+ "Software"), to deal in the Software without restriction, including
6
+ without limitation the rights to use, copy, modify, merge, publish,
7
+ distribute, sublicense, and/or sell copies of the Software, and to
8
+ permit persons to whom the Software is furnished to do so, subject to
9
+ the following conditions:
10
+
11
+ The above copyright notice and this permission notice shall be
12
+ included in all copies or substantial portions of the Software.
13
+
14
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
15
+ EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
16
+ MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
17
+ NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
18
+ LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
19
+ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
20
+ WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
data/README.rdoc ADDED
@@ -0,0 +1,40 @@
1
+ = S3File
2
+
3
+ Interact with your Amazon S3 bucket.
4
+ * Move objects between S3 and your system.
5
+ * Create buckets.
6
+ * Delete buckets.
7
+ * Fix buckets.
8
+
9
+ == Installation
10
+ The gem requires you to have s3cmd, a Python tool for transferring files, installed. http://s3tools.org/s3cmd.
11
+
12
+ == Usage
13
+ The gem was designed to be similar to the File class in Ruby. Thus you can use ls, cp_r, cp_rf, rm etc.
14
+ You will need to start out with setting at the minimum a
15
+ :access_key and a
16
+ :secret_access_key
17
+ so..
18
+ require 'rubygems'
19
+ require 's3file'
20
+ S3File.new(:access_key => 'blahblah', :secret_access_key => 'blahblah')
21
+
22
+ then use the code as you normally would
23
+ S3File.ls("s3://bucket")
24
+ S3File.cp("s3://bucket/file.rb", ".")
25
+ S3File.cp_r("s3://bucket/files/", ".")
26
+ S3File.rm("s3://bucket/file.rb")
27
+
28
+ == Note on Patches/Pull Requests
29
+
30
+ * Fork the project.
31
+ * Make your feature addition or bug fix.
32
+ * Add tests for it. This is important so I don't break it in a
33
+ future version unintentionally.
34
+ * Commit, do not mess with rakefile, version, or history.
35
+ (if you want to have your own version, that is fine but bump version in a commit by itself I can ignore when I pull)
36
+ * Send me a pull request. Bonus points for topic branches.
37
+
38
+ == Copyright
39
+
40
+ Copyright (c) 2010 glen. See LICENSE for details.
data/Rakefile ADDED
@@ -0,0 +1,54 @@
require 'rubygems'
require 'rake'

# Gem packaging via jeweler; degrade gracefully when it is not installed.
begin
  require 'jeweler'
  Jeweler::Tasks.new do |gem|
    gem.name        = "s3file"
    gem.summary     = %Q{Use s3cmd to interact with Amazon's S3}
    gem.description = %Q{Use methods similar to the File class of Ruby to interact with the S3 objects of your Amazon bucket}
    gem.email       = "glen.noronha@gmail.com"
    gem.homepage    = "http://github.com/glen/s3file"
    gem.authors     = ["glen"]
    gem.add_development_dependency "open4", ">= 1.0.1"
    gem.files.include %w(lib/s3file/*)
    # gem is a Gem::Specification... see http://www.rubygems.org/read/chapter/20 for additional settings
  end
  Jeweler::GemcutterTasks.new
rescue LoadError
  puts "Jeweler (or a dependency) not available. Install it with: gem install jeweler"
end

# Unit tests live under test/ and follow the test_*.rb naming convention.
require 'rake/testtask'
Rake::TestTask.new(:test) do |t|
  t.libs << 'lib' << 'test'
  t.pattern = 'test/**/test_*.rb'
  t.verbose = true
end

# Coverage via rcov; provide a stub task with install advice when missing.
begin
  require 'rcov/rcovtask'
  Rcov::RcovTask.new do |t|
    t.libs << 'test'
    t.pattern = 'test/**/test_*.rb'
    t.verbose = true
  end
rescue LoadError
  task :rcov do
    abort "RCov is not available. In order to run rcov, you must: sudo gem install spicycode-rcov"
  end
end

task :test => :check_dependencies

task :default => :test

# API documentation generation, titled with the gem version when available.
require 'rake/rdoctask'
Rake::RDocTask.new do |rdoc|
  version = File.exist?('VERSION') ? File.read('VERSION') : ""

  rdoc.rdoc_dir = 'rdoc'
  rdoc.title = "s3file #{version}"
  rdoc.rdoc_files.include('README*')
  rdoc.rdoc_files.include('lib/**/*.rb')
end
data/VERSION ADDED
@@ -0,0 +1 @@
1
+ 0.1.0
@@ -0,0 +1,31 @@
class S3File
  class << self
    # Creates a new bucket on S3.
    # Raises S3PathError when the bucket path does not start with s3://.
    def make_bucket(bucket)
      bucket_command("mb", bucket)
    end

    # Removes a bucket from S3.
    # Raises S3PathError when the bucket path does not start with s3://.
    def remove_bucket(bucket)
      bucket_command("rb", bucket)
    end

    # Reports the disk usage of the bucket.
    # Raises S3PathError when the bucket path does not start with s3://.
    def disk_usage(bucket)
      bucket_command("du", bucket)
    end

    # Fixes any invalid file names in the bucket.
    # Raises S3PathError when the bucket path does not start with s3://.
    def fix_bucket(bucket)
      bucket_command("fixbucket", bucket)
    end

    private

    # Validates +bucket+ as an S3 path, then runs the given s3cmd subcommand
    # against it using the generated configuration file.
    def bucket_command(subcommand, bucket)
      raise(S3PathError, "Invalid S3 path") unless s3?(bucket)
      run_command("s3cmd #{subcommand} #{bucket} -c #{@@config_file}")
    end
  end
end
data/lib/s3file/cp.rb ADDED
@@ -0,0 +1,43 @@
class S3File
  class << self
    # Copies a single file between S3 and the local system; the direction is
    # inferred from +source+ (an s3:// source downloads, a local source uploads).
    # Raises S3CommandError when +source+ is an S3 folder (trailing slash).
    def cp(source, destination)
      guard_against_folder(source, "Attempting to copy folder using cp. Use cp_r instead.")
      transfer(source, destination)
    end

    # Force-copies a single file between S3 and local, overwriting the target.
    # Raises S3CommandError when +source+ is an S3 folder (trailing slash).
    def cp_f(source, destination)
      guard_against_folder(source, "Attempting to copy folder using cp. Use cp_rf instead.")
      transfer(source, destination, " --force")
    end

    # Recursively copies a folder between S3 and local (either direction).
    def cp_r(source, destination)
      transfer(source, destination, " --recursive")
    end

    # Recursively and forcefully copies a folder, overwriting existing files.
    def cp_rf(source, destination)
      transfer(source, destination, " --recursive --force")
    end

    private

    # Raises S3CommandError with +message+ when an S3 source ends in a slash,
    # i.e. the caller handed a folder to a single-file operation.
    def guard_against_folder(source, message)
      raise(S3CommandError, message) if s3?(source) && source.match(/\/\z/)
    end

    # Runs the s3cmd get/put moving +source+ to +destination+, appending any
    # extra +flags+ after the config-file option. Does nothing when +source+
    # is neither an S3 path nor an existing local path.
    def transfer(source, destination, flags = "")
      if s3?(source)
        run_command("s3cmd get #{source} #{destination} -c #{@@config_file}#{flags}")
      elsif local?(source)
        run_command("s3cmd put #{source} #{destination} -c #{@@config_file}#{flags}")
      end
    end
  end
end
@@ -0,0 +1,25 @@
class S3File

  # Raised when S3File.new is called without both an access_key and a
  # secret_access_key.
  InitializationError = Class.new(StandardError)

  # Raised when the supplied keys are rejected by S3.
  AuthorizationError = Class.new(StandardError)

  # Raised when there is an error in the local file path.
  PathError = Class.new(StandardError)

  # Raised when there is an error in the S3 path.
  S3PathError = Class.new(StandardError)

  # Raised when an S3 command is misused. This could happen when you are
  # trying to copy a folder instead of a file using cp rather than cp_r.
  S3CommandError = Class.new(StandardError)

  # Raised when there is no connection to S3.
  ConnectionError = Class.new(StandardError)

  # Raised for any error not recognized as one of the above.
  UnknownError = Class.new(StandardError)

end
data/lib/s3file/ls.rb ADDED
@@ -0,0 +1,59 @@
class S3File
  class << self
    # Lists every entry (directories and files) at the given S3 location.
    def ls(location = nil)
      list(location, 'all')
    end

    # Lists only the directories at the given S3 location.
    def ls_directories(location = nil)
      list(location, 'dir')
    end

    # Lists only the files at the given S3 location.
    def ls_files(location = nil)
      list(location, 'file')
    end
  end

  # NOTE(review): this bare `private` does not actually affect the singleton
  # methods defined in the `class << self` block below, so `list` remains
  # publicly callable; kept as-is to preserve the existing interface.
  private

  class << self
    # Runs `s3cmd ls` on +location+ and returns the filtered entries.
    # +display+ selects what is returned: 'dir', 'file', or anything else
    # (including nil) for both. Returns [] for a nil location and raises
    # PathError when +location+ is not an s3:// path.
    def list(location = nil, display = nil)
      return [] if location.nil?
      raise(PathError, "Not S3 Path") unless s3?(location)

      # Normalize the filter: anything other than 'dir'/'file' means 'all'.
      display = "all" if display.nil? || !display.match(/dir|file/)

      # A bare bucket needs a trailing slash for s3cmd to list its contents.
      location += "/" if location == s3_bucket(location)
      listing = run_command("s3cmd ls #{location} -c #{@@config_file}")

      prefix = s3_directory(location)
      found_dirs = []
      found_files = []
      listing.split("\n").each do |line|
        stripped = line.strip
        if display.match(/all|dir/) && stripped.match(/DIR/)
          name = stripped.sub(/DIR/, '').strip.sub("#{prefix}", "")
          # A bare "/" means the listing echoed the queried directory itself.
          found_dirs << (name == "/" ? location + "/" : name)
        elsif display.match(/all|file/) && !stripped.match(/DIR/)
          # Drop the "YYYY-MM-DD HH:MM" timestamp, take the path column,
          # then strip the directory prefix to leave the bare entry name.
          name = stripped.sub(/\d{4}-\d{2}-\d{2}\s*\d{2}:\d{2}\s*/, '').split(" ")[1].sub("#{prefix}", "")
          # An empty name means the listing echoed the queried file itself.
          found_files << (name == "" ? location : name)
        end
      end
      found_dirs + found_files
    end
  end
end
data/lib/s3file/mv.rb ADDED
@@ -0,0 +1,10 @@
class S3File
  class << self
    # Moves a file from one bucket location to another.
    # Both +source+ and +destination+ must be s3:// paths; raises S3PathError otherwise.
    def mv(source, destination)
      unless s3?(source) && s3?(destination)
        raise(S3PathError, "Either source or destination (or both) not S3")
      end
      run_command("s3cmd mv #{source} #{destination} -c #{@@config_file}")
    end
  end
end
data/lib/s3file/rm.rb ADDED
@@ -0,0 +1,35 @@
class S3File
  class << self
    # Deletes a single file from S3.
    # Raises S3CommandError when given a folder (trailing slash) -- use rm_r.
    # Raises S3PathError when +location+ is not an s3:// path.
    def rm(location)
      raise(S3PathError, "Incorrect S3 path") unless s3?(location)
      # BUG FIX: this guard previously read `source.match(...)` -- an
      # undefined local -- so every rm call raised NameError instead of
      # performing the delete (or raising the intended S3CommandError).
      raise(S3CommandError, "Attempting to delete folder using rm. Use rm_r instead.") if location.match(/\/\z/)
      run_command("s3cmd del #{location} -c #{@@config_file}")
    end

    # Recursively deletes a folder from S3.
    # Raises S3PathError when +location+ is not an s3:// path.
    def rm_r(location)
      raise(S3PathError, "Incorrect S3 path") unless s3?(location)
      run_command("s3cmd del #{location} -c #{@@config_file} --recursive")
    end

    # Recursively and forcefully deletes a folder from S3.
    # Raises S3PathError when +location+ is not an s3:// path.
    def rm_rf(location)
      raise(S3PathError, "Incorrect S3 path") unless s3?(location)
      run_command("s3cmd del #{location} -c #{@@config_file} --recursive --force")
    end
  end
end
data/lib/s3file.rb ADDED
@@ -0,0 +1,149 @@
class S3File
  require 'open4'
  # FIX: `require 'fileutils'` was missing although FileUtils.rm_f is used
  # below; FIX: removed the leftover `require 'ruby-debug'`, an undeclared
  # development dependency that broke loading for users without that gem.
  require 'fileutils'
  # Require all the files in the lib/s3file
  $:.unshift File.join(File.dirname(__FILE__), '..', 'lib')
  require 's3file/cp'
  require 's3file/errors'
  require 's3file/ls'
  require 's3file/rm'
  require 's3file/bucket'
  require 's3file/mv'

  # Path of the generated s3cmd configuration file in the user's home directory.
  @@config_file = File.join(File.expand_path("~/"), ".s3file.cfg")

  # Initialize the object to the bucket given the access_key and secret_access_key.
  # You need to have the following set:
  # *:access_key and
  # *:secret_access_key
  # s3cmd requires a config file to be created.
  # Based upon the values provided - a .s3file.cfg is created in the user's home directory.
  # If the keys are not provided - it will raise an InitializationError.
  def initialize(keys = {})
    if keys.nil? || keys[:access_key].nil? || keys[:secret_access_key].nil?
      raise(InitializationError, "Keys not set when initializing S3 connection")
    end
    S3File.create_config_file(keys)
  end

  class << self
    # Deletes the config file that was created.
    def delete_config_file!
      FileUtils.rm_f(@@config_file) if S3File.config_file_exists?
    end

    # Gets the md5 hash of the s3 file. This will be used to confirm if the
    # copy occurred successfully or not.
    # Raises S3PathError when +location+ is not an s3:// path.
    def md5_hash(location)
      raise(S3PathError, "Not an S3 location") unless s3?(location)
      # The md5 is the 4th whitespace-separated column of the listing output.
      run_command("s3cmd --list-md5 ls #{location} -c #{@@config_file}").split(" ")[3]
    end
  end

  private
  class << self
    # Returns true or false if the s3cmd config file exists.
    def config_file_exists?
      # File.exist? -- File.exists? was deprecated and removed in Ruby 3.2.
      File.exist?(@@config_file)
    end

    # Creates the config file consumed by s3cmd (-c option), embedding the
    # caller-supplied access and secret keys.
    def create_config_file(keys = {})
      # Data to be put into the config file. The %(...)s placeholders are
      # s3cmd's own Python-style interpolation and must be written verbatim.
      config_data = %Q{[default]
access_key = #{keys[:access_key].to_s}
acl_public = False
bucket_location = US
cloudfront_host = cloudfront.amazonaws.com
cloudfront_resource = /2008-06-30/distribution
default_mime_type = binary/octet-stream
delete_removed = False
dry_run = False
encoding = UTF-8
encrypt = False
force = False
get_continue = False
gpg_command = /usr/bin/gpg
gpg_decrypt = %(gpg_command)s -d --verbose --no-use-agent --batch --yes --passphrase-fd %(passphrase_fd)s -o %(output_file)s %(input_file)s
gpg_encrypt = %(gpg_command)s -c --verbose --no-use-agent --batch --yes --passphrase-fd %(passphrase_fd)s -o %(output_file)s %(input_file)s
gpg_passphrase =
guess_mime_type = True
host_base = s3.amazonaws.com
host_bucket = %(bucket)s.s3.amazonaws.com
human_readable_sizes = False
list_md5 = False
preserve_attrs = True
progress_meter = True
proxy_host =
proxy_port = 0
recursive = False
recv_chunk = 4096
secret_key = #{keys[:secret_access_key]}
send_chunk = 4096
simpledb_host = sdb.amazonaws.com
skip_existing = False
urlencoding_mode = normal
use_https = False
verbosity = WARNING}

      File.delete(@@config_file) if File.exist?(@@config_file)
      # Block form guarantees the handle is closed even if puts raises.
      File.open(@@config_file, "w") do |file|
        file.puts config_data
      end
    end

    # Checks if the location is on s3 or not
    # by checking if the location starts with s3://.
    def s3?(location)
      return true if location.match(/s3:\/\//)
      return false
    end


    # Checks if the location is on local or not.
    # Checking that the location does not start with s3://
    # Short cut - use the opposite of s3? to get the solution.
    def local?(location)
      if s3?(location)
        return false
      else
        return File.exist?(File.expand_path(location))
      end
    end

    # Gets the directory of the file given the location
    # (strips the final path component).
    def s3_directory(location)
      location.sub(%r{#{location.match(/[^\/]*\z/)}\z}, "")
    end

    # Gets the s3 bucket given a location (the leading s3://bucket part).
    def s3_bucket(location)
      location.match("s3://[^\/]*").to_s
    end

    # Runs +command+, capturing stdout and stderr, and maps s3cmd failures
    # onto the gem's exception classes. Returns the command's stdout on success.
    def run_command(command)
      output = err = nil
      Open4::popen4(command) do |pid, stdin, stdout, stderr|
        output = stdout.read
        err = stderr.read
      end

      if err.empty?
        raise(PathError, "File not found") if output.empty?
        return output
      else
        puts "Error #{err}"
        raise(AuthorizationError, "Authorization keys failed") if err.match("S3 error: 403")
        raise(S3PathError, "S3 file does not exist") if err.match("S3 error: 404")
        raise(S3PathError, "S3 bucket does not exist") if err.match("does not exist")
        raise(S3PathError, "S3 bucket does not exist") if err.match("was denied")
        raise(PathError, "Local file already exists") if err.match("already exists")
        raise(ConnectionError, "Check your connection and try again") if err.match("Errno -2")
        raise(UnknownError, "Unknown Error caught - #{err}")
      end
    end
  end
end
data/test/helper.rb ADDED
@@ -0,0 +1,11 @@
require 'rubygems'
require 'test/unit'
require 'shoulda'

# Make both lib/ and the test directory itself requirable.
[File.join(File.dirname(__FILE__), '..', 'lib'), File.dirname(__FILE__)].each do |dir|
  $LOAD_PATH.unshift(dir)
end
require 's3file'
require 's3file/errors'

# Shared base class for the gem's test cases.
class Test::Unit::TestCase
end
@@ -0,0 +1,27 @@
require 'helper'

class TestS3file < Test::Unit::TestCase
  # S3File.new must refuse to build a connection when no keys are supplied.
  def test_should_raise_InitializationError_if_no_arguments_provided
    assert_raises(S3File::InitializationError) { S3File.new }
  end

  # The secret key alone is not enough -- the access key is mandatory.
  def test_should_raise_InitializationError_if_access_key_not_provided
    assert_raises(S3File::InitializationError) { S3File.new(:secret_access_key => "blah") }
  end

  # The access key alone is not enough -- the secret key is mandatory.
  def test_should_raise_InitializationError_if_secret_access_key_not_provided
    assert_raises(S3File::InitializationError) { S3File.new(:access_key => "blah") }
  end

  # With both keys present, construction writes the s3cmd config file.
  def test_should_create_config_file_given_access_key_and_secret_access_key
    S3File.delete_config_file!
    S3File.new(:access_key => "blah", :secret_access_key => "blah")
    assert_equal true, S3File.config_file_exists?
  end
end
metadata ADDED
@@ -0,0 +1,98 @@
1
+ --- !ruby/object:Gem::Specification
2
+ name: s3file
3
+ version: !ruby/object:Gem::Version
4
+ hash: 27
5
+ prerelease: false
6
+ segments:
7
+ - 0
8
+ - 1
9
+ - 0
10
+ version: 0.1.0
11
+ platform: ruby
12
+ authors:
13
+ - glen
14
+ autorequire:
15
+ bindir: bin
16
+ cert_chain: []
17
+
18
+ date: 2010-07-20 00:00:00 +05:30
19
+ default_executable:
20
+ dependencies:
21
+ - !ruby/object:Gem::Dependency
22
+ name: open4
23
+ prerelease: false
24
+ requirement: &id001 !ruby/object:Gem::Requirement
25
+ none: false
26
+ requirements:
27
+ - - ">="
28
+ - !ruby/object:Gem::Version
29
+ hash: 21
30
+ segments:
31
+ - 1
32
+ - 0
33
+ - 1
34
+ version: 1.0.1
35
+ type: :development
36
+ version_requirements: *id001
37
+ description: Use methods similar to the File class of Ruby to interact with the S3 objects of your Amazon bucket
38
+ email: glen.noronha@gmail.com
39
+ executables: []
40
+
41
+ extensions: []
42
+
43
+ extra_rdoc_files:
44
+ - LICENSE
45
+ - README.rdoc
46
+ files:
47
+ - .document
48
+ - .gitignore
49
+ - LICENSE
50
+ - README.rdoc
51
+ - Rakefile
52
+ - VERSION
53
+ - lib/s3file.rb
54
+ - lib/s3file/bucket.rb
55
+ - lib/s3file/cp.rb
56
+ - lib/s3file/errors.rb
57
+ - lib/s3file/ls.rb
58
+ - lib/s3file/mv.rb
59
+ - lib/s3file/rm.rb
60
+ - test/helper.rb
61
+ - test/test_s3file.rb
62
+ has_rdoc: true
63
+ homepage: http://github.com/glen/s3file
64
+ licenses: []
65
+
66
+ post_install_message:
67
+ rdoc_options:
68
+ - --charset=UTF-8
69
+ require_paths:
70
+ - lib
71
+ required_ruby_version: !ruby/object:Gem::Requirement
72
+ none: false
73
+ requirements:
74
+ - - ">="
75
+ - !ruby/object:Gem::Version
76
+ hash: 3
77
+ segments:
78
+ - 0
79
+ version: "0"
80
+ required_rubygems_version: !ruby/object:Gem::Requirement
81
+ none: false
82
+ requirements:
83
+ - - ">="
84
+ - !ruby/object:Gem::Version
85
+ hash: 3
86
+ segments:
87
+ - 0
88
+ version: "0"
89
+ requirements: []
90
+
91
+ rubyforge_project:
92
+ rubygems_version: 1.3.7
93
+ signing_key:
94
+ specification_version: 3
95
+ summary: Use s3cmd to interact with Amazon's S3
96
+ test_files:
97
+ - test/helper.rb
98
+ - test/test_s3file.rb