s3_dir 0.0.1 → 0.0.2
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +4 -4
- data/lib/s3_dir/version.rb +1 -1
- data/lib/s3_dir.rb +61 -35
- metadata +2 -3
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
|
|
1
1
|
---
|
2
2
|
SHA1:
|
3
|
-
metadata.gz:
|
4
|
-
data.tar.gz:
|
3
|
+
metadata.gz: 92a816d5e2e14b3c1238da1987a697f5748be223
|
4
|
+
data.tar.gz: b7aca8ce2710278a087d70ba5bb63e599a010ab4
|
5
5
|
SHA512:
|
6
|
-
metadata.gz:
|
7
|
-
data.tar.gz:
|
6
|
+
metadata.gz: a83953cdbdd9118062ca6c564eb6469e9107b8fd5ff0a89eb213c20ad35530ad5fb44c0e4a54f16c7dc64632f20298059fa4843b52edc2dae37a5b36973b8e69
|
7
|
+
data.tar.gz: c6225bc8585036a0920d716dab714348053da2d4761042844b0d8a29a6f61f40fe999948a785004ab3d3ee47e134dfeefcc31f86699953bf8ef814a65777611c
|
data/lib/s3_dir/version.rb
CHANGED
data/lib/s3_dir.rb
CHANGED
@@ -1,5 +1,6 @@
|
|
1
1
|
require_relative 's3_dir/version'
|
2
2
|
require 'fog/aws/storage'
|
3
|
+
require 'digest/md5'
|
3
4
|
|
4
5
|
# S3Dir uploads files to S3
|
5
6
|
#
|
@@ -26,51 +27,76 @@ require 'fog/aws/storage'
|
|
26
27
|
# of its contents not public.
|
27
28
|
#
|
28
29
|
module S3Dir
|
30
|
+
|
29
31
|
# Upload files to S3
|
30
32
|
def self.upload dir, key, options={}
|
31
|
-
|
33
|
+
uploader = Uploader.new(dir, key, options)
|
34
|
+
uploader.upload
|
35
|
+
end
|
36
|
+
|
37
|
+
class Uploader
|
38
|
+
attr_reader :key
|
39
|
+
attr_reader :bucket
|
40
|
+
attr_reader :storage
|
41
|
+
attr_reader :is_public
|
42
|
+
attr_reader :files_path
|
43
|
+
|
44
|
+
def initialize dir, key, options
|
45
|
+
@files_path = File.expand_path(dir)
|
32
46
|
|
33
|
-
|
34
|
-
|
35
|
-
|
47
|
+
# Merge defaults with passed-in options
|
48
|
+
settings = {credential: ENV['FOG_CREDENTIAL'],
|
49
|
+
private: false}.merge(options)
|
36
50
|
|
37
|
-
|
38
|
-
|
39
|
-
# Fog::Storage object with custom settings
|
40
|
-
all_credentials = YAML::load_file(File.join(ENV['HOME'], '.fog'))
|
41
|
-
credential = settings[:credential]
|
42
|
-
credentials = all_credentials[credential]
|
43
|
-
access_key = credentials[:aws_access_key_id]
|
44
|
-
secret_key = credentials[:aws_secret_access_key]
|
45
|
-
region = credentials[:region] || 'us-west-2'
|
51
|
+
# Configure Fog
|
52
|
+
Fog.credential = settings[:credential]
|
46
53
|
|
47
|
-
|
48
|
-
|
49
|
-
# the name (i.e. website bucket)
|
50
|
-
endpoint = 'http://s3.amazonaws.com'
|
54
|
+
# Get a region
|
55
|
+
region = Fog.credentials[:region] || 'us-west-2'
|
51
56
|
|
52
|
-
|
53
|
-
|
57
|
+
# If we don't specify this endpoint, Fog will complain about
|
58
|
+
# not using the correct endpoint if the bucket has dots in
|
59
|
+
# the name (i.e. website bucket)
|
60
|
+
endpoint = 'http://s3.amazonaws.com'
|
54
61
|
|
55
|
-
|
56
|
-
|
57
|
-
# our website bucket (if we're using a bucket with dots in the name)
|
58
|
-
# not being covered by the SSL certificate.
|
59
|
-
storage = Fog::Storage.new(provider: 'aws', aws_access_key_id: access_key,
|
60
|
-
aws_secret_access_key: secret_key,
|
61
|
-
path_style: true, region: region,
|
62
|
-
endpoint: endpoint)
|
63
|
-
bucket = storage.directories.get(key)
|
64
|
-
bucket ||= storage.directories.create(key: key, public: is_public)
|
62
|
+
# This may be a public bucket
|
63
|
+
@is_public = !settings[:private]
|
65
64
|
|
66
|
-
|
67
|
-
|
68
|
-
|
69
|
-
|
70
|
-
|
71
|
-
|
65
|
+
# Set up our storage object
|
66
|
+
# We have to specify path_style here because Fog will complain about
|
67
|
+
# our website bucket (if we're using a bucket with dots in the name)
|
68
|
+
# not being covered by the SSL certificate.
|
69
|
+
fog_options = Fog.credentials.merge({provider: 'aws', path_style: true,
|
70
|
+
region: region, endpoint: endpoint})
|
71
|
+
fog_options.delete(:key_name)
|
72
|
+
@storage = Fog::Storage.new(fog_options)
|
73
|
+
@bucket = storage.directories.get(key)
|
74
|
+
@bucket ||= storage.directories.create(key: key, public: is_public)
|
75
|
+
@key = key
|
76
|
+
end
|
77
|
+
|
78
|
+
def upload
|
79
|
+
Dir.chdir(files_path) do
|
80
|
+
Dir['**/*'].each do |entry|
|
81
|
+
File.directory?(entry) ? create_directory(entry) : create_file(entry)
|
72
82
|
end
|
73
83
|
end
|
74
84
|
end
|
85
|
+
|
86
|
+
private
|
87
|
+
|
88
|
+
def create_directory entry
|
89
|
+
bucket.files.create(key: entry, public: is_public)
|
90
|
+
end
|
91
|
+
|
92
|
+
def create_file entry
|
93
|
+
storage.head_object(key, entry, {'If-None-Match' => md5(entry)})
|
94
|
+
rescue Excon::Errors::NotFound
|
95
|
+
bucket.files.create(key: entry, public: is_public, body: File.open(entry))
|
96
|
+
end
|
97
|
+
|
98
|
+
def md5 entry
|
99
|
+
Digest::MD5.digest(entry)
|
100
|
+
end
|
75
101
|
end
|
76
102
|
end
|
metadata
CHANGED
@@ -1,14 +1,14 @@
|
|
1
1
|
--- !ruby/object:Gem::Specification
|
2
2
|
name: s3_dir
|
3
3
|
version: !ruby/object:Gem::Version
|
4
|
-
version: 0.0.1
|
4
|
+
version: 0.0.2
|
5
5
|
platform: ruby
|
6
6
|
authors:
|
7
7
|
- nuex
|
8
8
|
autorequire:
|
9
9
|
bindir: bin
|
10
10
|
cert_chain: []
|
11
|
-
date: 2013-12-
|
11
|
+
date: 2013-12-18 00:00:00.000000000 Z
|
12
12
|
dependencies:
|
13
13
|
- !ruby/object:Gem::Dependency
|
14
14
|
name: bundler
|
@@ -106,4 +106,3 @@ signing_key:
|
|
106
106
|
specification_version: 4
|
107
107
|
summary: Uploads a directory of files to an AWS S3 bucket
|
108
108
|
test_files: []
|
109
|
-
has_rdoc:
|