blobstore_client 0.5.0 → 1.5.0.pre.1113

@@ -3,17 +3,22 @@
 module Bosh
   module Blobstore
     class Client
-      def create(contents)
-      end
+      PROVIDER_NAMES = %w[dav simple s3 swift atmos local]
+
+      def self.create(blobstore_provider, options = {})
+        unless PROVIDER_NAMES.include? blobstore_provider
+          raise BlobstoreError, "Invalid client provider, available providers are: #{PROVIDER_NAMES}"
+        end
 
-      def get(id, file = nil)
+        blobstore_client_constantize(blobstore_provider).new(options)
       end
 
-      def delete(id)
+      private
+
+      def self.blobstore_client_constantize(base_string)
+        class_string = base_string.capitalize + (base_string == 'local' ? '' : 'Blobstore') + 'Client'
+        Bosh::Blobstore.const_get class_string
       end
     end
   end
 end
-
-
-
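The hunk above turns Bosh::Blobstore::Client into a factory: a provider name from PROVIDER_NAMES is capitalized and resolved to a concrete client class (LocalClient, DavBlobstoreClient, S3BlobstoreClient, and so on). A minimal usage sketch, not part of the diff, assuming the gem is loaded and the path is writable; the option key comes from the LocalClient hunk further down:

    require 'blobstore_client'

    # 'local' resolves to Bosh::Blobstore::LocalClient
    # ('local'.capitalize + '' + 'Client' per blobstore_client_constantize)
    client = Bosh::Blobstore::Client.create('local', blobstore_path: '/tmp/blobstore')

    # An unknown provider name raises Bosh::Blobstore::BlobstoreError:
    # Bosh::Blobstore::Client.create('ftp')  # => BlobstoreError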
@@ -0,0 +1,67 @@
+# Copyright (c) 2009-2012 VMware, Inc.
+
+require 'base64'
+require 'httpclient'
+require 'digest/sha1'
+
+module Bosh
+  module Blobstore
+    class DavBlobstoreClient < BaseClient
+
+      def initialize(options)
+        super(options)
+        @client = HTTPClient.new
+        @endpoint = @options[:endpoint]
+        # @bucket = @options[:bucket] || "resources" # dav (or simple) doesn't support buckets
+        @headers = {}
+        user = @options[:user]
+        password = @options[:password]
+        if user && password
+          @headers['Authorization'] = 'Basic ' +
+            Base64.encode64("#{user}:#{password}").strip
+        end
+      end
+
+      def url(id)
+        prefix = Digest::SHA1.hexdigest(id)[0, 2]
+
+        [@endpoint, prefix, id].compact.join('/')
+      end
+
+      def create_file(id, file)
+        id ||= generate_object_id
+
+        response = @client.put(url(id), file, @headers)
+
+        raise BlobstoreError, "Could not create object, #{response.status}/#{response.content}" if response.status != 201
+
+        id
+      end
+
+      def get_file(id, file)
+        response = @client.get(url(id), {}, @headers) do |block|
+          file.write(block)
+        end
+
+        raise BlobstoreError, "Could not fetch object, #{response.status}/#{response.content}" if response.status != 200
+      end
+
+      def delete_object(id)
+        response = @client.delete(url(id), @headers)
+
+        raise BlobstoreError, "Could not delete object, #{response.status}/#{response.content}" if response.status != 204
+      end
+
+      def object_exists?(id)
+        response = @client.head(url(id), header: @headers)
+        if response.status == 200
+          true
+        elsif response.status == 404
+          false
+        else
+          raise BlobstoreError, "Could not get object existence, #{response.status}/#{response.content}"
+        end
+      end
+    end
+  end
+end
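The new DAV client shards objects under a two-character prefix: url(id) inserts the first two hex characters of SHA1(id) between the endpoint and the object id. An illustration (the endpoint is made up; the digest is the real SHA-1 of 'foo'):

    require 'digest/sha1'

    id = 'foo'
    Digest::SHA1.hexdigest(id)         # => "0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33"
    Digest::SHA1.hexdigest(id)[0, 2]   # => "0b"
    # With @endpoint = 'http://dav.example.com/blobs', url('foo') would be:
    # "http://dav.example.com/blobs/0b/foo"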
@@ -5,6 +5,7 @@ module Bosh
 
     class BlobstoreError < StandardError; end
     class NotFound < BlobstoreError; end
+    class NotImplemented < BlobstoreError; end
 
   end
 end
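Because NotFound and the new NotImplemented both subclass BlobstoreError, callers can rescue the specific error or fall back to the base class. A sketch, assuming the public get wrapper from the old Client interface is still exposed by BaseClient (it is not shown in this diff):

    begin
      client.get('some-object-id', File.open('/tmp/out', 'w'))
    rescue Bosh::Blobstore::NotFound
      # the object does not exist
    rescue Bosh::Blobstore::BlobstoreError => e
      # any other blobstore failure, including NotImplemented
      raise e
    end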
@@ -3,47 +3,56 @@
 module Bosh
   module Blobstore
     class LocalClient < BaseClient
-      CHUNK_SIZE = 1024*1024
+      CHUNK_SIZE = 1024 * 1024
 
       def initialize(options)
         super(options)
-        @blobstore_path = @options[:blobstore_path]
-        raise "No blobstore path given" if @blobstore_path.nil?
+        @blobstore_path = URI(@options[:blobstore_path]).path
+        raise "No blobstore path given in options #{@options}" if @blobstore_path.nil?
         FileUtils.mkdir_p(@blobstore_path) unless File.directory?(@blobstore_path)
       end
 
-      def create_file(file)
-        id = UUIDTools::UUID.random_create.to_s
-        dst = File.join(@blobstore_path, id)
+      protected
+
+      def create_file(id, file)
+        id ||= generate_object_id
+        dst = object_file_path(id)
+        raise BlobstoreError, "object id #{id} is already in use" if File.exist?(dst)
         File.open(dst, 'w') do |fh|
-          until file.eof?
-            fh.write(file.read(CHUNK_SIZE))
-          end
+          fh.write(file.read(CHUNK_SIZE)) until file.eof?
         end
         id
       end
 
       def get_file(id, file)
-        src = File.join(@blobstore_path, id)
+        src = object_file_path(id)
 
         begin
           File.open(src, 'r') do |src_fh|
-            until src_fh.eof?
-              file.write(src_fh.read(CHUNK_SIZE))
-            end
+            file.write(src_fh.read(CHUNK_SIZE)) until src_fh.eof?
           end
         end
       rescue Errno::ENOENT
         raise NotFound, "Blobstore object '#{id}' not found"
       end
 
-      def delete(id)
-        file = File.join(@blobstore_path, id)
+      def delete_object(id)
+        file = object_file_path(id)
         FileUtils.rm(file)
       rescue Errno::ENOENT
         raise NotFound, "Blobstore object '#{id}' not found"
       end
 
+      def object_exists?(oid)
+        File.exists?(object_file_path(oid))
+      end
+
+      private
+
+      def object_file_path(oid)
+        File.join(@blobstore_path, oid)
+      end
+
 
     end
   end
 end
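Two behavioural changes in LocalClient are worth calling out: blobstore_path now goes through URI(...).path, so the option can be given as either a plain filesystem path or a file:// URI, and the per-object path is centralised in object_file_path. A quick illustration of the URI handling (paths are made up):

    require 'uri'

    URI('/var/vcap/data/blobstore').path          # => "/var/vcap/data/blobstore"
    URI('file:///var/vcap/data/blobstore').path   # => "/var/vcap/data/blobstore"

Note that File.exists?, used in object_exists?, is the deprecated alias of File.exist?.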
@@ -1,20 +1,20 @@
 # Copyright (c) 2009-2012 VMware, Inc.
 
-require "openssl"
-require "digest/sha1"
-require "base64"
-require "aws/s3"
-require "uuidtools"
+require 'openssl'
+require 'digest/sha1'
+require 'base64'
+require 'aws'
+require 'securerandom'
 
 module Bosh
   module Blobstore
 
     class S3BlobstoreClient < BaseClient
 
-      ENDPOINT = "https://s3.amazonaws.com"
-      DEFAULT_CIPHER_NAME = "aes-128-cbc"
+      ENDPOINT = 'https://s3.amazonaws.com'
+      DEFAULT_CIPHER_NAME = 'aes-128-cbc'
 
-      attr_reader :bucket_name, :encryption_key
+      attr_reader :bucket_name, :encryption_key, :simple
 
       # Blobstore client for S3 with optional object encryption
       # @param [Hash] options S3connection options
@@ -32,144 +32,145 @@ module Bosh
         @encryption_key = @options[:encryption_key]
 
         aws_options = {
-          :access_key_id => @options[:access_key_id],
-          :secret_access_key => @options[:secret_access_key],
-          :use_ssl => true,
-          :port => 443
+          access_key_id: @options[:access_key_id],
+          secret_access_key: @options[:secret_access_key],
+          use_ssl: true,
+          port: 443,
+          s3_endpoint: URI.parse(@options[:endpoint] || S3BlobstoreClient::ENDPOINT).host,
         }
 
         # using S3 without credentials is a special case:
         # it is really the simple blobstore client with a bucket name
         if read_only?
+          if @encryption_key
+            raise BlobstoreError, "can't use read-only with an encryption key"
+          end
+
           unless @options[:bucket_name] || @options[:bucket]
-            raise BlobstoreError, "bucket name required"
+            raise BlobstoreError, 'bucket name required'
           end
+
           @options[:bucket] ||= @options[:bucket_name]
           @options[:endpoint] ||= S3BlobstoreClient::ENDPOINT
           @simple = SimpleBlobstoreClient.new(@options)
         else
-          AWS::S3::Base.establish_connection!(aws_options)
+          @s3 = AWS::S3.new(aws_options)
         end
 
-      rescue AWS::S3::S3Exception => e
+      rescue AWS::Errors::Base => e
         raise BlobstoreError, "Failed to initialize S3 blobstore: #{e.message}"
       end
 
-      def create_file(file)
-        raise BlobstoreError, "unsupported action" if @simple
+      # @param [File] file file to store in S3
+      def create_file(object_id, file)
+        raise BlobstoreError, 'unsupported action' if @simple
 
-        object_id = generate_object_id
+        object_id ||= generate_object_id
 
-        if @encryption_key
-          temp_path do |path|
-            File.open(path, "w") do |temp_file|
-              encrypt_stream(file, temp_file)
-            end
-            File.open(path, "r") do |temp_file|
-              AWS::S3::S3Object.store(object_id, temp_file, bucket_name)
-            end
-          end
-        elsif file.is_a?(String)
-          File.open(file, "r") do |temp_file|
-            AWS::S3::S3Object.store(object_id, temp_file, bucket_name)
-          end
-        else # Ruby 1.8 passes a File
-          AWS::S3::S3Object.store(object_id, file, bucket_name)
-        end
+        file = encrypt_file(file) if @encryption_key
+
+        # in Ruby 1.8 File doesn't respond to :path
+        path = file.respond_to?(:path) ? file.path : file
+        store_in_s3(path, full_oid_path(object_id))
 
         object_id
-      rescue AWS::S3::S3Exception => e
-        raise BlobstoreError,
-          "Failed to create object, S3 response error: #{e.message}"
+      rescue AWS::Errors::Base => e
+        raise BlobstoreError, "Failed to create object, S3 response error: #{e.message}"
+      ensure
+        FileUtils.rm(file) if @encryption_key
       end
 
+      # @param [String] object_id object id to retrieve
+      # @param [File] file file to store the retrived object in
       def get_file(object_id, file)
+
+        object_id = full_oid_path(object_id)
         return @simple.get_file(object_id, file) if @simple
 
-        object = AWS::S3::S3Object.find(object_id, bucket_name)
-        from = lambda { |callback|
-          object.value { |segment|
-            # Looks like the aws code calls this block even if segment is empty.
-            # Ideally it should be fixed upstream in the aws gem.
-            unless segment.empty?
-              callback.call(segment)
-            end
-          }
-        }
         if @encryption_key
-          decrypt_stream(from, file)
-        else
-          to_stream = write_stream(file)
-          read_stream(from) { |segment| to_stream.call(segment) }
+          cipher = OpenSSL::Cipher::Cipher.new(DEFAULT_CIPHER_NAME)
+          cipher.decrypt
+          cipher.key = Digest::SHA1.digest(encryption_key)[0..(cipher.key_len - 1)]
+        end
+
+        object = get_object_from_s3(object_id)
+        object.read do |chunk|
+          if @encryption_key
+            file.write(cipher.update(chunk))
+          else
+            file.write(chunk)
+          end
         end
-      rescue AWS::S3::NoSuchKey => e
+        file.write(cipher.final) if @encryption_key
+
+      rescue AWS::S3::Errors::NoSuchKey => e
         raise NotFound, "S3 object '#{object_id}' not found"
-      rescue AWS::S3::S3Exception => e
-        raise BlobstoreError,
-          "Failed to find object '#{object_id}', S3 response error: #{e.message}"
+      rescue AWS::Errors::Base => e
+        raise BlobstoreError, "Failed to find object '#{object_id}', S3 response error: #{e.message}"
+      end
+
+      # @param [String] object_id object id to delete
+      def delete_object(object_id)
+        raise BlobstoreError, 'unsupported action' if @simple
+        object_id = full_oid_path(object_id)
+        object = get_object_from_s3(object_id)
+        unless object.exists?
+          raise BlobstoreError, "no such object: #{object_id}"
+        end
+        object.delete
+      rescue AWS::Errors::Base => e
+        raise BlobstoreError, "Failed to delete object '#{object_id}', S3 response error: #{e.message}"
       end
 
-      def delete(object_id)
-        raise BlobstoreError, "unsupported action" if @simple
+      def object_exists?(object_id)
+        object_id = full_oid_path(object_id)
+        return simple.exists?(object_id) if simple
 
-        AWS::S3::S3Object.delete(object_id, bucket_name)
-      rescue AWS::S3::S3Exception => e
-        raise BlobstoreError,
-          "Failed to delete object '#{object_id}', S3 response error: #{e.message}"
+        get_object_from_s3(object_id).exists?
       end
 
       protected
 
-      def generate_object_id
-        UUIDTools::UUID.random_create.to_s
+      # @param [String] oid object id
+      # @return [AWS::S3::S3Object] S3 object
+      def get_object_from_s3(oid)
+        @s3.buckets[bucket_name].objects[oid]
       end
 
-      def encrypt_stream(from, to)
-        cipher = OpenSSL::Cipher::Cipher.new(DEFAULT_CIPHER_NAME)
-        cipher.encrypt
-        cipher.key = Digest::SHA1.digest(encryption_key)[0..cipher.key_len-1]
-
-        to_stream = write_stream(to)
-        read_stream(from) { |segment| to_stream.call(cipher.update(segment)) }
-        to_stream.call(cipher.final)
-      rescue StandardError => e
-        raise BlobstoreError, "Encryption error: #{e}"
+      # @param [String] path path to file which will be stored in S3
+      # @param [String] oid object id
+      # @return [void]
+      def store_in_s3(path, oid)
+        s3_object = get_object_from_s3(oid)
+        raise BlobstoreError, "object id #{oid} is already in use" if s3_object.exists?
+        File.open(path, 'r') do |temp_file|
+          s3_object.write(temp_file)
+        end
       end
 
-      def decrypt_stream(from, to)
+      def encrypt_file(file)
         cipher = OpenSSL::Cipher::Cipher.new(DEFAULT_CIPHER_NAME)
-        cipher.decrypt
-        cipher.key = Digest::SHA1.digest(encryption_key)[0..cipher.key_len-1]
-
-        to_stream = write_stream(to)
-        read_stream(from) { |segment| to_stream.call(cipher.update(segment)) }
-        to_stream.call(cipher.final)
-      rescue StandardError => e
-        raise BlobstoreError, "Decryption error: #{e}"
-      end
+        cipher.encrypt
+        cipher.key = Digest::SHA1.digest(encryption_key)[0..(cipher.key_len - 1)]
 
-      def read_stream(stream, &block)
-        if stream.respond_to?(:read)
-          while contents = stream.read(32768)
-            block.call(contents)
+        path = temp_path
+        File.open(path, 'w') do |temp_file|
+          while (block = file.read(32768))
+            temp_file.write(cipher.update(block))
           end
-        elsif stream.kind_of?(Proc)
-          stream.call(block)
+          temp_file.write(cipher.final)
         end
-      end
 
-      def write_stream(stream)
-        if stream.respond_to?(:write)
-          lambda { |contents| stream.write(contents)}
-        elsif stream.kind_of?(Proc)
-          stream
-        end
+        path
       end
 
       def read_only?
         @options[:access_key_id].nil? && @options[:secret_access_key].nil?
       end
 
+      def full_oid_path(object_id)
+        @options[:folder] ? @options[:folder] + '/' + object_id : object_id
+      end
     end
   end
 end
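The encryption path in the new S3 client derives the AES key directly from the configured encryption_key: the key is the first key_len bytes of SHA1(encryption_key), and no explicit IV is set in the client code above. A standalone sketch of that derivation (the secret value is made up):

    require 'openssl'
    require 'digest/sha1'

    cipher = OpenSSL::Cipher.new('aes-128-cbc')   # DEFAULT_CIPHER_NAME
    cipher.encrypt
    # SHA-1 yields 20 bytes; aes-128-cbc only uses the first cipher.key_len (16)
    cipher.key = Digest::SHA1.digest('my-secret-key')[0..(cipher.key_len - 1)]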
@@ -1,6 +1,7 @@
 # Copyright (c) 2009-2012 VMware, Inc.
 
-require "httpclient"
+require 'base64'
+require 'httpclient'
 
 module Bosh
   module Blobstore
@@ -10,25 +11,25 @@ module Bosh
         super(options)
         @client = HTTPClient.new
         @endpoint = @options[:endpoint]
-        @bucket = @options[:bucket] || "resources"
+        @bucket = @options[:bucket] || 'resources'
         @headers = {}
         user = @options[:user]
         password = @options[:password]
         if user && password
-          @headers["Authorization"] = "Basic " +
+          @headers['Authorization'] = 'Basic ' +
             Base64.encode64("#{user}:#{password}").strip
         end
       end
 
-      def url(id=nil)
-        ["#{@endpoint}/#{@bucket}", id].compact.join("/")
+      def url(id = nil)
+        ["#{@endpoint}/#{@bucket}", id].compact.join('/')
       end
 
-      def create_file(file)
-        response = @client.post(url, {:content => file}, @headers)
+      def create_file(id, file)
+        response = @client.post(url(id), { content: file }, @headers)
         if response.status != 200
           raise BlobstoreError,
-            "Could not create object, #{response.status}/#{response.content}"
+                "Could not create object, #{response.status}/#{response.content}"
         end
         response.content
       end
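The simple client (like the DAV client earlier) authenticates with HTTP Basic auth built by hand from the user and password options. The resulting header, shown with made-up credentials:

    require 'base64'

    'Basic ' + Base64.encode64('agent:c1oudc0w').strip
    # => "Basic YWdlbnQ6YzFvdWRjMHc="

Base64.encode64 appends a trailing newline, which is why the client calls strip on the encoded value.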
@@ -40,14 +41,26 @@ module Bosh
 
         if response.status != 200
           raise BlobstoreError,
-            "Could not fetch object, #{response.status}/#{response.content}"
+                "Could not fetch object, #{response.status}/#{response.content}"
         end
       end
 
-      def delete(id)
+      def delete_object(id)
         response = @client.delete(url(id), @headers)
         if response.status != 204
-          raise "Could not delete object, #{response.status}/#{response.content}"
+          raise BlobstoreError,
+                "Could not delete object, #{response.status}/#{response.content}"
+        end
+      end
+
+      def object_exists?(id)
+        response = @client.head(url(id), header: @headers)
+        if response.status == 200
+          true
+        elsif response.status == 404
+          false
+        else
+          raise BlobstoreError, "Could not get object existence, #{response.status}/#{response.content}"
         end
       end
     end