s3share 0.3 → 0.4
- data/lib/s3share/runner.rb +29 -0
- data/lib/s3share/version.rb +1 -1
- metadata +4 -5
- data/lib/s3share/foo.diff +0 -124
data/lib/s3share/runner.rb
CHANGED
@@ -63,9 +63,13 @@ module S3Share
 
       create_bucket_if_it_does_not_exist(bucket_name)
 
+      # TODO: Find a way to make this faster in the future
+      t = Thread.new { start_progress_counter }
+
       AWS::S3::S3Object.store(@filename, open("#{@path}/#{@filename}"),
                               bucket_name,
                               :access => :public_read)
+      t.kill
 
       url = "http://s3.amazonaws.com/#{bucket_name}/#{@filename}"
       puts "\n #{@filename} uploaded to: #{url}\n\n"
@@ -111,5 +115,30 @@ module S3Share
       puts "Bucket '#{bucket_name}' does not exist. Creating it..."
       AWS::S3::Bucket.create(bucket_name)
     end
+
+    # TODO: Cleanup.
+    def start_progress_counter
+      reset = "\r\e[0K"
+      str = "#{reset}Uploading"
+
+      while(true) do
+        print "#{str}"
+        sleep(0.1)
+        $stdout.flush
+        Thread.pass
+        print "#{str}."
+        sleep(0.1)
+        $stdout.flush
+        Thread.pass
+        print "#{str}.."
+        sleep(0.1)
+        $stdout.flush
+        Thread.pass
+        print "#{str}..."
+        sleep(0.1)
+        $stdout.flush
+        Thread.pass
+      end
+    end
   end
 end
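The new start_progress_counter runs a busy loop that repeats the same print/sleep/flush/pass sequence four times per animation cycle, which is what the "# TODO: Cleanup." note refers to. A minimal sketch of one way that loop could be tightened (not part of this release; the loop/4.times structure below is illustrative, not the gem's actual code):

def start_progress_counter
  reset = "\r\e[0K"  # ANSI sequence: return to column 0 and clear the line
  loop do
    4.times do |dots|
      # Redraw "Uploading" with 0..3 trailing dots, then yield to the upload thread.
      print "#{reset}Uploading#{'.' * dots}"
      $stdout.flush
      sleep(0.1)
      Thread.pass
    end
  end
end

Either way, the thread running the counter is stopped with t.kill once AWS::S3::S3Object.store returns, so the loop needs no exit condition of its own.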
data/lib/s3share/version.rb
CHANGED
metadata
CHANGED
@@ -1,12 +1,12 @@
 --- !ruby/object:Gem::Specification
 name: s3share
 version: !ruby/object:Gem::Version
-  hash:
+  hash: 3
   prerelease: false
   segments:
   - 0
-  -
-  version: "0.
+  - 4
+  version: "0.4"
 platform: ruby
 authors:
 - Federico Builes
@@ -14,7 +14,7 @@ autorequire:
 bindir: bin
 cert_chain: []
 
-date: 2010-12-
+date: 2010-12-20 00:00:00 -05:00
 default_executable:
 dependencies:
 - !ruby/object:Gem::Dependency
@@ -65,7 +65,6 @@ files:
 - Rakefile
 - LICENSE
 - lib/s3share/args.rb
-- lib/s3share/foo.diff
 - lib/s3share/runner.rb
 - lib/s3share/version.rb
 - lib/s3share.rb
data/lib/s3share/foo.diff
DELETED
@@ -1,124 +0,0 @@
-commit 75686151a5525e3d8bbaeee9474f2312f9a2abe0
-Author: Andrés Mejía <andmej@gmail.com>
-Date:   Sun Nov 28 23:06:05 2010 +0100
-
-    If the bucket you are trying to use doesn't exist yet, create it automatically.
-
-diff --git a/README.markdown b/README.markdown
-index 2fade33..39400f2 100644
---- a/README.markdown
-+++ b/README.markdown
-@@ -29,7 +29,7 @@ You'll need to set the three following ENV variables:
- * `AMAZON_SECRET_ACCESS_KEY`: AWS secret access key.
- * `AMAZON_S3_DEFAULT_BUCKET`: Name of the bucket where the uploads will be held.
- 
--The last variable is visible in the URL returned to the user: `http://s3.amazonaws.com/{AMAZON_S3_DEFAULT_BUCKET}/some_photo.png`, so make sure you choose something pretty. This value is global for all the S3 namespace, meaning you need to find something unique between all the S3 users ("some-user-name_uploads" should do the trick).
-+The last variable is visible in the URL returned to the user: `http://s3.amazonaws.com/{AMAZON_S3_DEFAULT_BUCKET}/some_photo.png`, so make sure you choose something pretty. This value is global for all the S3 namespace, meaning you need to find something unique between all the S3 users ("some-user-name_uploads" should do the trick). If the specified bucket doesn't exist, it will be automatically created.
- 
- You can set these variables in a `~/.amazon_keys` file:
- 
-diff --git a/lib/s3share/runner.rb b/lib/s3share/runner.rb
-index e0c5dc7..e9cb328 100644
---- a/lib/s3share/runner.rb
-+++ b/lib/s3share/runner.rb
-@@ -60,6 +60,8 @@ module S3Share
-         :access_key_id => access_key,
-         :secret_access_key => secret_key
-       )
-+
-+      create_bucket_if_it_does_not_exist(bucket_name)
- 
-       AWS::S3::S3Object.store(@filename, open("#{@path}/#{@filename}"),
-                               bucket_name,
-@@ -100,5 +102,14 @@ module S3Share
-       }
-       errors[err].each { |msg| puts msg }
-     end
-+
-+    private
-+    # Check if the bucket exists and create it if it doesn't.
-+    def create_bucket_if_it_does_not_exist(bucket_name)
-+      AWS::S3::Bucket.find(bucket_name)
-+    rescue AWS::S3::NoSuchBucket => e
-+      puts "Bucket '#{bucket_name}' does not exist. Creating it..."
-+      AWS::S3::Bucket.create(bucket_name)
-+    end
-   end
- end
-diff --git a/spec/runner_spec.rb b/spec/runner_spec.rb
-index cf28997..50d660d 100644
---- a/spec/runner_spec.rb
-+++ b/spec/runner_spec.rb
-@@ -6,9 +6,10 @@ describe S3Share::Runner do
-     S3Share::Runner.new(filename)
-   end
- 
--  let(:relative) { runner("something/spec.opts") }
--  let(:absolute) { runner("/Users/someone/something/spec.opts") }
--  let(:file) { runner("spec.opts") }
-+  let(:relative)      { runner("something/spec.opts") }
-+  let(:absolute)      { runner("/Users/someone/something/spec.opts") }
-+  let(:file)          { runner("spec.opts") }
-+  let(:existing_file) { runner(__FILE__) }
- 
-   describe "#get_directory" do
-     it "correctly expands a relative path" do
-@@ -37,5 +38,31 @@ describe S3Share::Runner do
-       file.clean_filename.should == "spec.opts"
-     end
-   end
-+
-+  describe "#upload_file" do
-+    it "checks if the bucket exists before starting the upload" do
-+      # Stub this method so we don't try a real upload.
-+      AWS::S3::S3Object.stub!(:store).and_return(nil)
-+
-+      ENV["AMAZON_S3_DEFAULT_BUCKET"] = "an_imaginary_bucket"
-+      existing_file.should_receive(:create_bucket_if_it_does_not_exist).with("an_imaginary_bucket").and_return(:nil)
-+      silence_stream(STDOUT) { existing_file.upload_file }
-+    end
-+  end
-+
-+  describe "#create_bucket_if_it_does_not_exist" do
-+    it "calls AWS::S3::Bucket.find to see if the bucket exists" do
-+      AWS::S3::Bucket.should_receive(:find).with("the_bucket").and_return(nil)
-+      existing_file.send :create_bucket_if_it_does_not_exist, "the_bucket"
-+    end
-+
-+    it "calls AWS::S3::Bucket.create if the bucket doesn't exist" do
-+      AWS::S3::Base.establish_connection!(:access_key_id => ENV["AMAZON_ACCESS_KEY_ID"], :secret_access_key => ENV["AMAZON_SECRET_ACCESS_KEY"])
-+      AWS::S3::Bucket.should_receive(:create).with("an_imaginary_bucket_that_surely_does_not_exist").and_return(nil)
-+      silence_stream(STDOUT) do
-+        existing_file.send :create_bucket_if_it_does_not_exist, "an_imaginary_bucket_that_surely_does_not_exist"
-+      end
-+    end
-+  end
- end
- 
-diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb
-index 205e799..c3946ff 100644
---- a/spec/spec_helper.rb
-+++ b/spec/spec_helper.rb
-@@ -1,5 +1,20 @@
- $LOAD_PATH.unshift File.join(File.dirname(__FILE__), '..', 'lib')
- 
--
- require 'rspec'
- require 's3share'
-+
-+# Silences any stream for the duration of the block.
-+#
-+#   silence_stream(STDOUT) do
-+#     puts 'This will never be seen'
-+#   end
-+#
-+#   puts 'But this will'
-+def silence_stream(stream)
-+  old_stream = stream.dup
-+  stream.reopen('/dev/null')
-+  stream.sync = true
-+  yield
-+ensure
-+  stream.reopen(old_stream)
-+end
-