s3_uploader 0.0.7 → 0.0.8

checksums.yaml CHANGED
@@ -1,15 +1,15 @@
 ---
 !binary "U0hBMQ==":
   metadata.gz: !binary |-
-    MTlmOTgzMTU3ODA0OGZiYTRhYmUxNGVkZjkwZDNjOGU3YzRiMGYxMg==
+    MDFkOWViMjI4OWI3YjMyOTIwMzc2Y2E4ZTNiNzgyNjJiMTc2NWE5NA==
   data.tar.gz: !binary |-
-    NDdiYzkwNTVkOGNkOTNmNTcyYmU1NjA5NGMwYzYzNDEyNTViYzlkNw==
+    ZjAwZjNiOTZhYmE3ZDllYTI1Mjk5OGY2ODg3MjQzNjlhOGJhY2M0OQ==
 SHA512:
   metadata.gz: !binary |-
-    MWRhYTY2ZmZkZjg0OTA0ZjIzYjcyMjVhMzk2ZjRlMWMyY2NjNWQ3MWExYTQz
-    M2ZmYjliYWI0YTQ2OThjMzU5YmZmYjk4NmQ2NDU4ZDU4MTk3MTg0MjQ1YzIz
-    YjgxMWQ4NmNkZmZiMWQ2ZTIxOTQyYmI1ZDU5OGNjZDM4NDExNTI=
+    NGVkNmRjNWNjYzc3NzY2OGY5YjQwYWYwZThjYmQ4ZjMzOGNmZmU5OGI2ODcx
+    MmJmN2NkMTIwNWY2NTU3M2VhN2JmNWYzOTVhNDQ1MTUxZjI5NTE3MGU3ZmE1
+    NTY0MjA5NzI1MjRjZDJkM2NjN2Q2MGMzMTcwODE5NDU3YWIzYTU=
   data.tar.gz: !binary |-
-    ZGU5ZDY0NWJjNzdhZGMxZjBlODJhODEzMTViYTExYTg5NjYxNWI1ZmY5YjJi
-    NmE2N2Y4YmZiNTZkZDRhNGQ1NThmMDI0YjcwMmViZTdkZTg3OTFhNzNkNDI3
-    MDRmZTIzYTc4OWQ2YzFlMGVhNTliMzVhOTVhMzM3ZGE5MTU3NTk=
+    NTE1MjQyYjFhZGRlNDcwYjRlNDYyZDBiYWU0MGUwYjUzNGQ4M2M4NWFhZTdl
+    N2EwODBkNTliZTllZGZjZTk5YjNiODlkZjA0N2E3N2VmYTkxODI2MGNhZTE1
+    ODVlNDMwYzBiZDQ0OGQ1ZTI5ODE3Y2EyZjhjNDY0NmE2MjI2MDc=
data/README.md CHANGED
@@ -32,19 +32,24 @@ Or install it yourself as:
         :s3_secret => YOUR_SECRET_KEY,
         :destination_dir => 'test/',
         :region => 'eu-west-1',
-        :threads => 4 })
+        :threads => 4,
+        :metadata => { 'Cache-Control' => 'max-age=315576000' }
+      })
 
 If no keys are provided, it uses S3_KEY and S3_SECRET environment variables. us-east-1 is the default region.
 
     S3Uploader.upload_directory('/tmp/test', 'mybucket', { :destination_dir => 'test/', :threads => 4 })
-
+
 Or as a command line binary
-
+
     s3uploader -r eu-west-1 -k YOUR_KEY -s YOUR_SECRET_KEY -d test/ -t 4 /tmp/test mybucket
-
+
 Again, it uses S3_KEY and S3_SECRET environment variables if non provided in parameters.
-
+
     s3uploader -d test/ -t 4 /tmp/test mybucket
+
+
+Metadata headers are documented [here](http://docs.aws.amazon.com/AmazonS3/latest/API/RESTObjectPUT.html)
 
 ## TODO
 
@@ -62,6 +67,7 @@ Again, it uses S3_KEY and S3_SECRET environment variables if non provided in par
 ## Contributors
 
 * [Mark Wagner](https://github.com/theSociableme)
+* [Brandon Hilkert](https://github.com/brandonhilkert)
 
 ## License
 
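Taken together, the README changes above document the new `:metadata` option introduced in 0.0.8. A minimal sketch of a call that uses it (the bucket name and source path are placeholders; the Cache-Control value is the one from the README example):

```ruby
require 's3_uploader'

# Upload every file under /tmp/test to s3://mybucket/test/ with 4 threads,
# attaching a Cache-Control header to each uploaded object. If :s3_key and
# :s3_secret are omitted, the gem falls back to the S3_KEY and S3_SECRET
# environment variables, and the region defaults to us-east-1.
S3Uploader.upload_directory('/tmp/test', 'mybucket', {
  :destination_dir => 'test/',
  :region          => 'eu-west-1',
  :threads         => 4,
  :metadata        => { 'Cache-Control' => 'max-age=315576000' }
})
```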
@@ -7,18 +7,19 @@ module S3Uploader
       :s3_key => ENV['S3_KEY'],
       :s3_secret => ENV['S3_SECRET'],
       :public => false,
-      :region => 'us-east-1'
+      :region => 'us-east-1',
+      :metadata => {}
     }.merge(options)
-
+
     log = options[:logger] || Logger.new(STDOUT)
-
+
     raise 'Source must be a directory' unless File.directory?(source)
-
+
     if options[:connection]
       connection = options[:connection]
     else
       raise "Missing access keys" if options[:s3_key].nil? or options[:s3_secret].nil?
-
+
       connection = Fog::Storage.new({
         :provider => 'AWS',
         :aws_access_key_id => options[:s3_key],
@@ -26,7 +27,7 @@ module S3Uploader
         :region => options[:region]
       })
     end
-
+
     source = source.chop if source.end_with?('/')
     if options[:destination_dir] != '' and !options[:destination_dir].end_with?('/')
       options[:destination_dir] = "#{options[:destination_dir]}/"
@@ -36,20 +37,20 @@ module S3Uploader
     Dir.glob("#{source}/**/*").select{ |f| !File.directory?(f) }.each do |f|
       files << f
       total_size += File.size(f)
-
+
     end
-
+
     directory = connection.directories.new(:key => bucket)
-
+
     start = Time.now
     total_files = files.size
     file_number = 0
     @mutex = Mutex.new
-
+
     threads = []
     options[:threads].times do |i|
       threads[i] = Thread.new {
-
+
         until files.empty?
           @mutex.synchronize do
             file_number += 1
@@ -60,22 +61,23 @@ module S3Uploader
           key = file.gsub(source, '')[1..-1]
           dest = "#{options[:destination_dir]}#{key}"
           log.info("[#{Thread.current["file_number"]}/#{total_files}] Uploading #{key} to s3://#{bucket}/#{dest}")
-
+
           directory.files.create(
             :key => dest,
             :body => File.open(file),
-            :public => options[:public]
+            :public => options[:public],
+            :metadata => options[:metadata]
           )
         end
-       end
+         end
       }
     end
     threads.each { |t| t.join }
-
+
     finish = Time.now
     elapsed = finish.to_f - start.to_f
     mins, secs = elapsed.divmod 60.0
     log.info("Uploaded %d (%.#{0}f KB) in %d:%04.2f" % [total_files, total_size / KILO_SIZE, mins.to_i, secs])
-
+
   end
 end
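As the `if options[:connection]` branch above shows, a pre-built Fog connection can also be injected, and the new `:metadata` hash (defaulting to `{}`) is forwarded unchanged to `directory.files.create` for every uploaded file. A small sketch under those assumptions, with illustrative paths and header values:

```ruby
require 'fog'
require 's3_uploader'

# Build the Fog::Storage connection ourselves instead of letting the gem
# construct one from :s3_key / :s3_secret.
connection = Fog::Storage.new(
  :provider              => 'AWS',
  :aws_access_key_id     => ENV['S3_KEY'],
  :aws_secret_access_key => ENV['S3_SECRET'],
  :region                => 'eu-west-1'
)

# The :metadata hash is passed straight through to directory.files.create,
# so each uploaded object gets the Cache-Control header below.
S3Uploader.upload_directory('/tmp/test', 'mybucket', {
  :connection => connection,
  :metadata   => { 'Cache-Control' => 'max-age=315576000' }
})
```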
@@ -1,3 +1,3 @@
 module S3Uploader
-  VERSION = "0.0.7"
+  VERSION = "0.0.8"
 end
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: s3_uploader
 version: !ruby/object:Gem::Version
-  version: 0.0.7
+  version: 0.0.8
 platform: ruby
 authors:
 - Christian Hein
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2013-09-30 00:00:00.000000000 Z
+date: 2013-11-06 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: fog