s3batch 0.1.0
- data/lib/s3batch/batch_upload.rb +86 -0
- data/lib/s3batch/happening_patch.rb +27 -0
- data/lib/s3batch/multi_delete.rb +38 -0
- data/lib/s3batch.rb +18 -0
- metadata +72 -0
data/lib/s3batch/batch_upload.rb
ADDED
@@ -0,0 +1,86 @@
module S3Batch

  class TaskManager
    def initialize
      @count = 0
      @adding_ended = false
    end

    def add
      @count += 1
    end

    def remove
      @count -= 1
      EM.stop if @count == 0 && @adding_ended
    end

    def end_adding
      @adding_ended = true
      EM.stop if @count == 0
    end
  end

  class UploadCollection
    def initialize(bucket, dir, pattern, options = {})
      @bucket = bucket
      @dir = dir.end_with?('/') ? dir : dir + "/"
      @pattern = pattern
      @options = options
    end

    def upload request_options = {}
      item = Happening::S3::Item.new(@bucket, '', @options)
      item.get { |response|
        keys = parse_keys(response.response)
        check_md5_and_upload keys
      }
    end

    def self.run s3id, s3key, bucket, dir, pattern
      EM.run {
        items = UploadCollection.new bucket, dir, pattern, :aws_access_key_id => s3id, :aws_secret_access_key => s3key, :protocol => 'http', :permissions => 'public-read'
        items.upload
      }
    end

    private

    def parse_keys xml
      h = {}
      doc = Nokogiri::XML(xml)
      doc.css("Contents").each { |entry|
        key = entry.at("Key").content
        md5 = entry.at("ETag").content.gsub(/\A"/m, "").gsub(/"\Z/m, "")
        h[key] = md5
      }
      return h
    end

    def check_md5_and_upload keys
      manager = TaskManager.new
      on_error = Proc.new {|response| puts "An error occurred: #{response.response_header.status}"; manager.remove; }
      on_success = Proc.new {|response| manager.remove; }

      Dir.glob(@dir + @pattern) {|filename|
        next unless File.file? filename

        key = filename[@dir.length..-1]
        content = File.read(filename)
        md5 = Digest::MD5.hexdigest(content)

        if keys[key] != md5
          puts "uploading #{key} to #{@bucket}"
          item = Happening::S3::Item.new(@bucket, key, @options)
          item.put(content, :on_error => on_error, :on_success => on_success)
          manager.add
        else
          puts "ignore #{key}, no change"
        end
      }

      manager.end_adding
    end

  end
end
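For reference, parse_keys above walks the ListBucketResult XML that S3 returns for a GET on the bucket, mapping each object key to the MD5 carried in its ETag (the ETag equals the content MD5 only for objects uploaded in a single PUT, which is how this gem uploads). A minimal standalone sketch of that parsing step; the XML below is a trimmed, made-up response, not gem code:

require 'nokogiri'

xml = <<-XML
<ListBucketResult>
  <Contents>
    <Key>index.html</Key>
    <ETag>"0cc175b9c0f1b6a831c399e269772661"</ETag>
  </Contents>
</ListBucketResult>
XML

doc = Nokogiri::XML(xml)
doc.css("Contents").each do |entry|
  key = entry.at("Key").content
  md5 = entry.at("ETag").content.delete('"')   # strip the quotes S3 wraps around ETags
  puts "#{key} => #{md5}"
end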
data/lib/s3batch/happening_patch.rb
ADDED
@@ -0,0 +1,27 @@
# Monkey patch
module Happening
  class AWS
    protected

    alias old_canonical_request_description canonical_request_description

    def canonical_request_description(method, path, headers = {}, expires = nil)
      description = old_canonical_request_description(method, path, headers, expires)
      description << '?delete' if path[/[&?]delete($|&|=)/]
      description
    end
  end

  module S3
    class Request
      protected
      def validate
      end
    end

    class Item
      protected
      def validate
      end
    end
  end
end
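The alias above works around Happening leaving the delete sub-resource out of the canonical resource when it signs a request, and the empty validate overrides appear to relax Happening's name checks so the bucket-level ?delete path is accepted. For context, a sketch of what a signature-v2 string-to-sign for a Multi-Object Delete POST looks like; every value is a placeholder and none of this is gem code. The trailing ?delete on the resource is exactly what the patch preserves:

require 'openssl'
require 'base64'
require 'time'

secret_key = 'SECRET_KEY_PLACEHOLDER'
date       = Time.now.httpdate

string_to_sign = [
  'POST',                  # HTTP verb
  'CONTENT_MD5_OF_BODY',   # Content-MD5 of the <Delete> payload
  '',                      # Content-Type
  date,
  '/my-bucket/?delete'     # canonical resource must keep the ?delete sub-resource
].join("\n")

signature = Base64.encode64(
  OpenSSL::HMAC.digest('sha1', secret_key, string_to_sign)
).strip
puts "Authorization: AWS ACCESS_KEY_PLACEHOLDER:#{signature}"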
data/lib/s3batch/multi_delete.rb
ADDED
@@ -0,0 +1,38 @@
module S3Batch
  class DeleteCollection < Happening::S3::Item
    MULTIPLE_DELETE_LIMIT = 1000

    def initialize(bucket, keys, options = {})
      super bucket, "NO_USE", options
      @keys = keys
    end

    def path(with_bucket=true)
      with_bucket ? "/#{bucket}/?delete" : "/?delete"
    end

    def delete request_options = {}, &blk
      i = 0
      while i < @keys.size
        keys = @keys.slice(i, MULTIPLE_DELETE_LIMIT)
        data = "<Delete><Quiet>true</Quiet><Object><Key>" + keys.join("</Key></Object><Object><Key>") + "</Key></Object></Delete>"
        md5 = Base64.encode64(Digest::MD5.digest(data)).strip

        headers = aws.sign("POST", path, {"Content-MD5" => md5})
        request_options[:on_success] = blk if blk
        request_options.update(:headers => headers, :data => data)
        Happening::S3::Request.new(:post, url, {:ssl => options[:ssl]}.update(request_options)).execute
        i += MULTIPLE_DELETE_LIMIT
      end
    end

    def run s3id, s3key, bucket, keys
      EM.run {
        on_error = Proc.new {|response| puts "An error occurred: #{response.response}"; EM.stop }
        on_success = Proc.new {|response| puts "Deleted!"; EM.stop }
        items = DeleteCollection.new bucket, keys, :aws_access_key_id => s3id, :aws_secret_access_key => s3key, :protocol => 'http'
        items.delete(:on_error => on_error, :on_success => on_success)
      }
    end
  end
end
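DeleteCollection#delete batches keys into S3 Multi-Object Delete requests of at most 1000 keys each. A standalone sketch of the request body and Content-MD5 header it builds; the keys are placeholders, not gem code:

require 'digest/md5'
require 'base64'

keys = %w[old/a.html old/b.html]
data = "<Delete><Quiet>true</Quiet><Object><Key>" +
       keys.join("</Key></Object><Object><Key>") +
       "</Key></Object></Delete>"

md5 = Base64.encode64(Digest::MD5.digest(data)).strip
puts data                      # the XML payload POSTed to /<bucket>/?delete
puts "Content-MD5: #{md5}"     # S3 requires this header on Multi-Object Delete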
data/lib/s3batch.rb
ADDED
@@ -0,0 +1,18 @@
require 'happening'
require 'nokogiri'

require File.expand_path(File.dirname(__FILE__) + '/s3batch/happening_patch')
require File.expand_path(File.dirname(__FILE__) + '/s3batch/multi_delete')
require File.expand_path(File.dirname(__FILE__) + '/s3batch/batch_upload')

# ruby s3batch.rb S3ID S3KEY BUCKET DIR [PATTERN]
if $0 == __FILE__
  s3id = ARGV[0]
  s3key = ARGV[1]
  bucket = ARGV[2]
  dir = ARGV[3]
  pattern = ARGV[4] || "**/*"

  S3Batch::UploadCollection.run s3id, s3key, bucket, dir, pattern
end
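Besides the command-line form documented in the comment above, the library can be driven directly from Ruby. A minimal sketch; the bucket, directory, and environment variable names are placeholders:

require 's3batch'

s3id  = ENV['S3_ACCESS_KEY_ID']
s3key = ENV['S3_SECRET_ACCESS_KEY']

# Uploads every file under ./public matching **/*, skipping files whose
# MD5 already matches the object's ETag; blocks until the EM reactor stops.
S3Batch::UploadCollection.run s3id, s3key, 'my-bucket', './public', '**/*'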
metadata
ADDED
@@ -0,0 +1,72 @@
--- !ruby/object:Gem::Specification
name: s3batch
version: !ruby/object:Gem::Version
  version: 0.1.0
  prerelease:
platform: ruby
authors:
- Xue Yong Zhi
autorequire:
bindir: bin
cert_chain: []
date: 2012-05-30 00:00:00.000000000 Z
dependencies:
- !ruby/object:Gem::Dependency
  name: happening
  requirement: &70551020 !ruby/object:Gem::Requirement
    none: false
    requirements:
    - - ! '>='
      - !ruby/object:Gem::Version
        version: '0'
  type: :runtime
  prerelease: false
  version_requirements: *70551020
- !ruby/object:Gem::Dependency
  name: nokogiri
  requirement: &70550310 !ruby/object:Gem::Requirement
    none: false
    requirements:
    - - ! '>='
      - !ruby/object:Gem::Version
        version: '0'
  type: :runtime
  prerelease: false
  version_requirements: *70550310
description:
email:
- yong@intridea.com
executables: []
extensions: []
extra_rdoc_files: []
files:
- lib/s3batch/multi_delete.rb
- lib/s3batch/batch_upload.rb
- lib/s3batch/happening_patch.rb
- lib/s3batch.rb
homepage: http://github.com/yong/s3batch
licenses: []
post_install_message:
rdoc_options: []
require_paths:
- lib
required_ruby_version: !ruby/object:Gem::Requirement
  none: false
  requirements:
  - - ! '>='
    - !ruby/object:Gem::Version
      version: '0'
required_rubygems_version: !ruby/object:Gem::Requirement
  none: false
  requirements:
  - - ! '>='
    - !ruby/object:Gem::Version
      version: '0'
requirements: []
rubyforge_project:
rubygems_version: 1.8.10
signing_key:
specification_version: 3
summary: upload/delete s3 objects in batch
test_files: []
has_rdoc:
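To depend on this release from another project, a Gemfile entry along these lines should work (assuming the gem is fetched from rubygems.org); happening and nokogiri are pulled in automatically as the runtime dependencies declared above:

# Gemfile
gem 's3batch', '0.1.0'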