sublimate 0.1.0 → 0.1.1

data/Readme.md ADDED
@@ -0,0 +1,32 @@
+ Sublimate
+ =========
+
+ This is a helper library for uploading extremely large files using Fog. It splits them into chunks and uses multipart uploads on S3 or a manifest file on Rackspace Cloud Files.
+
+ Set the following environment variables to run real, live tests (mocks don't work for multipart uploads in Fog yet):
+
+ * AWS:
+   * AWS_ACCESS_KEY
+   * AWS_SECRET_ACCESS_KEY
+ * Rackspace:
+   * RACKSPACE_USERNAME
+   * RACKSPACE_API_KEY
+
+
+ Usage
+ =====
+ You'll need an instance of `Sublimate::Uploader`. Just pass it a Fog bucket/directory and call `store_file`.
+
+ Example:
+
+ ```ruby
+ opts = {
+   :provider => 'AWS',
+   :aws_access_key_id => ENV['AWS_ACCESS_KEY'],
+   :aws_secret_access_key => ENV['AWS_SECRET_ACCESS_KEY']
+ }
+ s3 = Fog::Storage.new(opts)
+ bucket = s3.directories.get('HUGE-FILES')
+ uploader = Sublimate::Uploader.new(bucket)
+ uploader.store_file(path, :key => 'something.iso')
+ ```
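
The README example covers only AWS, even though the library and the test instructions above also target Rackspace. For reference, a minimal sketch of the equivalent Rackspace Cloud Files setup, assuming the standard Fog credential options and reusing the environment variable names from the README (the container name and key are illustrative, as in the AWS example):

```ruby
opts = {
  :provider           => 'Rackspace',
  :rackspace_username => ENV['RACKSPACE_USERNAME'],
  :rackspace_api_key  => ENV['RACKSPACE_API_KEY']
}
storage  = Fog::Storage.new(opts)
bucket   = storage.directories.get('HUGE-FILES')  # a Cloud Files container
uploader = Sublimate::Uploader.new(bucket)
uploader.store_file(path, :key => 'something.iso')
```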
data/lib/sublimate.rb CHANGED
@@ -1,8 +1,8 @@
- require 'fog'
  # rackspace cloud: http://www.rackspace.com/knowledge_center/index.php/Does_Cloud_Files_support_large_file_transfer
  # s3: https://gist.github.com/908875
  # Google Storage apparently allows single PUTs up to 50GB
  module Sublimate
  end
+ require 'fog'
  require 'sublimate/chunked_file'
  require 'sublimate/uploader'
data/lib/sublimate/uploader.rb CHANGED
@@ -1,62 +1,51 @@
  module Sublimate
    class Uploader
-     def initialize(attrs)
-       @attrs = attrs
-
-       fog_attrs = attrs.clone
-       fog_attrs.delete(:auto_create_bucket)
-       fog_attrs.delete(:multipart_chunk_size)
-       fog_attrs.delete(:bucket)
-
-       @fog_storage = Fog::Storage.new(fog_attrs)
-     end
-
-     def storage
-       @fog_storage
+     def initialize(bucket, opts = {})
+       @bucket = bucket
+       @opts = opts
      end

+     attr_accessor :bucket
      def store_file(path, opts = {})
        opts = {:key => path}.merge(opts)
        size = File.size(path)

-       bucket = @fog_storage.directories.get(opts[:bucket])
-       if bucket.nil? && @attrs[:auto_create_bucket]
-         bucket = @fog_storage.directories.create(:key => opts[:bucket])
-       elsif bucket.nil?
-         raise "bucket not found"
-       end
-
-       m = "do_multi_#{@attrs[:provider].downcase}"
+       m = multi_method_name
        if respond_to?(m)
-         chunked = ChunkedFile.new(path, :chunk_size => @attrs[:multipart_chunk_size])
-         send(m, bucket, chunked, opts)
+         chunked = ChunkedFile.new(path, :chunk_size => @opts[:multipart_chunk_size])
+         send(m, chunked, opts)
        else
          opts[:body] = File.open(path)
          bucket.files.create(opts)
        end
      end

-     def do_multi_aws(bucket, chunked, opts)
-       multi = @fog_storage.initiate_multipart_upload(opts[:bucket], opts[:key])
+     def multi_method_name
+       n = bucket.connection.service.to_s.split('::').last.downcase
+       "do_multi_#{n}"
+     end
+
+     def do_multi_aws(chunked, opts)
+       multi = bucket.connection.initiate_multipart_upload(bucket.key, opts[:key])
        upload_id = multi.body["UploadId"]
        results = []
        chunked.each_chunk do |data, details|
-         part = @fog_storage.upload_part(opts[:bucket], opts[:key], upload_id, details[:counter] + 1, data)
+         part = bucket.connection.upload_part(bucket.key, opts[:key], upload_id, details[:counter] + 1, data)
          etag = part.headers['ETag']
          results << etag
        end

-       completed_upload = @fog_storage.complete_multipart_upload(opts[:bucket], opts[:key], upload_id, results)
+       completed_upload = bucket.connection.complete_multipart_upload(bucket.key, opts[:key], upload_id, results)
      end

-     def do_multi_rackspace(bucket, chunked, opts)
+     def do_multi_rackspace(chunked, opts)
        count = 1
        chunked.each_chunk do |data, details|
          key = opts[:key] + "/#{count}"
          bucket.files.create(:key => key, :body => data)
          count += 1
        end
-       result = @fog_storage.put_object_manifest(bucket.identity, opts[:key])
+       result = bucket.connection.put_object_manifest(bucket.key, opts[:key])
      end

    end
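
In short, 0.1.1 stops building a Fog connection inside the uploader: the caller hands in a Fog directory, `multi_method_name` derives the `do_multi_*` dispatch target from the connection's service class, and `:multipart_chunk_size` moves from the connection attrs to the uploader's own options hash. Bucket auto-creation is gone, so the directory must already exist. A rough before/after sketch for callers (bucket name, chunk size, and paths are illustrative):

```ruby
# 0.1.0: connection attrs, bucket name, and chunking options all went to
# the uploader, which created the Fog connection itself:
# uploader = Sublimate::Uploader.new(opts.merge(
#   :bucket               => 'HUGE-FILES',
#   :auto_create_bucket   => true,
#   :multipart_chunk_size => 5 * 1024 * 1024
# ))

# 0.1.1: build the connection yourself and pass the directory in;
# only chunking options stay with the uploader.
bucket   = Fog::Storage.new(opts).directories.get('HUGE-FILES')
uploader = Sublimate::Uploader.new(bucket, :multipart_chunk_size => 5 * 1024 * 1024)
uploader.store_file('/tmp/huge.iso', :key => 'huge.iso')
```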
metadata CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: sublimate
  version: !ruby/object:Gem::Version
-   version: 0.1.0
+   version: 0.1.1
  prerelease:
  platform: ruby
  authors:
@@ -10,16 +10,28 @@ autorequire:
  bindir: bin
  cert_chain: []
  date: 2011-12-26 00:00:00.000000000Z
- dependencies: []
+ dependencies:
+ - !ruby/object:Gem::Dependency
+   name: fog
+   requirement: &70351569528960 !ruby/object:Gem::Requirement
+     none: false
+     requirements:
+     - - ! '>='
+       - !ruby/object:Gem::Version
+         version: 1.1.2
+   type: :runtime
+   prerelease: false
+   version_requirements: *70351569528960
  description: Store ginormous files on S3 or the Rackspace Cloud using this gem.
  email: mrkurt@gmail.com
  executables: []
  extensions: []
  extra_rdoc_files: []
  files:
- - lib/sublimate.rb
  - lib/sublimate/chunked_file.rb
  - lib/sublimate/uploader.rb
+ - lib/sublimate.rb
+ - Readme.md
  homepage: https://github.com/mrkurt/sublimate
  licenses: []
  post_install_message: