fog-backblaze 0.1.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml.gz ADDED
@@ -0,0 +1,7 @@
1
+ ---
2
+ SHA1:
3
+ metadata.gz: e72a3f8088a51fed32e1b1efc3d18772ff03348b
4
+ data.tar.gz: 635bf11d1a64069912521d3c179fb5e46e67e3ec
5
+ SHA512:
6
+ metadata.gz: d3af95365d4d87cd941c016e18ebf326c9bcdbebb3d7440b29e1f8bc0557cc416c1fd9d973bc5e9cdf3a0091123bdff9b8c933d7c41e53ec25b682a421585e3c
7
+ data.tar.gz: 86a1546b2f45cf95bb0f79a93c97b3eaa3201b3d3f9655bd264cf13cc539d0c9e882d892392ab6d8f45e7cde3041bcaec7aa2bfe34d142ab7877e0a99b3bed6b
data/.gitignore ADDED
@@ -0,0 +1,8 @@
1
+ /.bundle/
2
+ /.yardoc
3
+ /_yardoc/
4
+ /coverage/
5
+ /doc/
6
+ /pkg/
7
+ /spec/reports/
8
+ /tmp/
data/.travis.yml ADDED
@@ -0,0 +1,5 @@
1
+ sudo: false
2
+ language: ruby
3
+ rvm:
4
+ - 2.4.0
5
+ before_install: gem install bundler -v 1.15.1
data/Gemfile ADDED
@@ -0,0 +1,8 @@
1
+ source "https://rubygems.org"
2
+
3
+ # Specify your gem's dependencies in fog-backblaze.gemspec
4
+ gemspec
5
+
6
+ gem "bundler", ">= 1.15"
7
+ gem "rake", "~> 12.0"
8
+ gem "minitest", ">= 5.0"
data/Gemfile.lock ADDED
@@ -0,0 +1,34 @@
1
+ PATH
2
+ remote: .
3
+ specs:
4
+ fog-backblaze (0.1.1)
5
+ fog-core
6
+
7
+ GEM
8
+ remote: https://rubygems.org/
9
+ specs:
10
+ builder (3.2.3)
11
+ excon (0.61.0)
12
+ fog-core (2.1.0)
13
+ builder
14
+ excon (~> 0.58)
15
+ formatador (~> 0.2)
16
+ mime-types
17
+ formatador (0.2.5)
18
+ mime-types (3.1)
19
+ mime-types-data (~> 3.2015)
20
+ mime-types-data (3.2016.0521)
21
+ minitest (5.11.3)
22
+ rake (12.3.1)
23
+
24
+ PLATFORMS
25
+ ruby
26
+
27
+ DEPENDENCIES
28
+ bundler (>= 1.15)
29
+ fog-backblaze!
30
+ minitest (>= 5.0)
31
+ rake (~> 12.0)
32
+
33
+ BUNDLED WITH
34
+ 1.16.1
data/LICENSE ADDED
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2017 Pavel Evstigneev
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
data/README.md ADDED
@@ -0,0 +1,72 @@
1
+ # Fog::Backblaze
2
+
3
+ Integration library for the fog gem and [Backblaze B2 Cloud Storage](https://www.backblaze.com/b2/cloud-storage.html)
4
+
5
+ ## Installation
6
+
7
+ Add this line to your application's Gemfile:
8
+
9
+ ```ruby
10
+ gem 'fog-backblaze'
11
+ ```
12
+
13
+ Or install it with gem:
14
+
15
+ ```sh
16
+ gem install fog-backblaze
17
+ ```
18
+
19
+ ## Usage
20
+
21
+ ```ruby
22
+ require "fog/backblaze"
23
+
24
+ connection = Fog::Storage.new(
25
+ provider: 'backblaze',
26
+ b2_account_id: '123456',
27
+ b2_account_token: 'aaaaabbbbbccccddddeeeeeffffff111112222223333',
28
+
29
+ # optional, used to make some operations faster
30
+ b2_bucket_name: 'app-test',
31
+ b2_bucket_id: '6ec42006ec42006ec42',
32
+
33
+ logger: Logger.new(STDOUT).tap {|l|
34
+ l.formatter = proc {|severity, datetime, progname, msg|
35
+ "#{severity.to_s[0]} - #{datetime.strftime("%T.%L")}: #{msg}\n"
36
+ }
37
+ },
38
+
39
+ token_cache: 'file.txt'
40
+ )
41
+ ```
42
+
43
+ See [example](examples/example.rb) for more details
44
+
45
+ ## Adding b2_bucket_id
46
+
47
+ Most internal operations require a `bucketId` value; to obtain it, fog-backblaze makes an extra API request.
48
+ Usually an application uses only one bucket and its id never changes (it changes only if the bucket is deleted and recreated with the same name).
49
+ You can avoid this extra request by setting the `b2_bucket_id` attribute.
50
+
51
+ How to get `b2_bucket_id`:
52
+ ```ruby
53
+ p connection._get_bucket_id(bucket_name)
54
+ ```
55
+
56
+ ## Token Cache
57
+
58
+ Each request requires an authentication token, which comes from the `b2_authorize_account` response.
59
+
60
+ For example, uploading a single file makes 4 requests internally:
61
+
62
+ 1. `b2_authorize_account` - valid for 24 hours
63
+ 2. `b2_list_buckets` - to get the bucket_id value; this call can be skipped by setting the `:b2_bucket_id` option (the id should not change)
64
+ 3. `b2_get_upload_url` - valid for 24 hours
65
+ 4. Send data to URL from step 3
66
+
67
+ The results of steps 1, 2 and 3 can be reused by saving them in a TokenCache. It acts as a generic caching interface with a few predefined implementations:
68
+
69
+ * In memory store `token_cache: :memory` (default)
70
+ * JSON file store `token_cache: 'file.txt'`
71
+ * Null store (will not cache anything) `token_cache: false` or `token_cache: Fog::Backblaze::TokenCache::NullTokenCache.new`
72
+ * Create your own; see [token_cache.rb](lib/fog/backblaze/token_cache.rb) for examples
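Beyond the raw request API shown above, the gem also registers the standard fog model layer (`directories`/`files`, defined under `lib/fog/backblaze/models/` later in this diff). A minimal sketch of that model-level flow, assuming the same credentials as in the Usage section; the bucket and file names are illustrative:

```ruby
require "fog/backblaze"

connection = Fog::Storage.new(
  provider: 'backblaze',
  b2_account_id: ENV['B2_ACCOUNT_ID'],
  b2_account_token: ENV['B2_ACCOUNT_TOKEN']
)

# Directories map to B2 buckets, files map to B2 objects.
dir = connection.directories.get('app-test')   # nil if the bucket does not exist
file = dir.files.new(key: 'hello.txt', body: 'Hello from fog-backblaze')
file.save                                      # uploads through put_object
puts file.public_url                           # plain download URL (public buckets only)

# Listing goes through b2_list_file_names
dir.files.all.each { |f| puts "#{f.key} (#{f.content_length} bytes)" }
```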
data/Rakefile ADDED
@@ -0,0 +1,10 @@
1
+ require "bundler/gem_tasks"
2
+ require "rake/testtask"
3
+
4
+ Rake::TestTask.new(:test) do |t|
5
+ t.libs << "test"
6
+ t.libs << "lib"
7
+ t.test_files = FileList["test/**/*_test.rb"]
8
+ end
9
+
10
+ task :default => :test
data/bin/console ADDED
@@ -0,0 +1,14 @@
1
+ #!/usr/bin/env ruby
2
+
3
+ require "bundler/setup"
4
+ require "fog/backblaze"
5
+
6
+ # You can add fixtures and/or initialization code here to make experimenting
7
+ # with your gem easier. You can also use a different console, if you like.
8
+
9
+ # (If you use this, don't forget to add pry to your Gemfile!)
10
+ # require "pry"
11
+ # Pry.start
12
+
13
+ require "irb"
14
+ IRB.start(__FILE__)
data/bin/setup ADDED
@@ -0,0 +1,8 @@
1
+ #!/usr/bin/env bash
2
+ set -euo pipefail
3
+ IFS=$'\n\t'
4
+ set -vx
5
+
6
+ bundle install
7
+
8
+ # Do any other automated setup that you need to do here
data/examples/example.rb ADDED
@@ -0,0 +1,70 @@
1
+ require 'logger'
2
+ require 'pp'
3
+
4
+ $:.push(File.expand_path("../../lib", __FILE__))
5
+
6
+ require_relative "../lib/fog/backblaze"
7
+
8
+ if !ENV['B2_ACCOUNT_ID'] || ENV['B2_ACCOUNT_ID'] == ""
9
+ puts "Missing env B2_ACCOUNT_ID"
10
+ exit 1
11
+ end
12
+
13
+ if !ENV['B2_ACCOUNT_TOKEN'] || ENV['B2_ACCOUNT_TOKEN'] == ""
14
+ puts "Missing env B2_ACCOUNT_TOKEN"
15
+ exit 1
16
+ end
17
+
18
+ connection = Fog::Storage.new(
19
+ provider: 'backblaze',
20
+ b2_account_id: ENV['B2_ACCOUNT_ID'],
21
+ b2_account_token: ENV['B2_ACCOUNT_TOKEN'],
22
+
23
+ #b2_bucket_name: ENV['B2_BUCKET'],
24
+ #b2_bucket_id: '111222333444',
25
+
26
+ logger: ENV['DEBUG'] ? Logger.new(STDOUT) : nil,
27
+ token_cache: :memory
28
+ )
29
+
30
+ connection.delete_bucket("fog-smoke-test") rescue nil
31
+
32
+ puts "Put a bucket..."
33
+ puts "----------------"
34
+ pp connection.put_bucket("fog-smoke-test", public: true).json
35
+
36
+ puts
37
+ puts "Get the bucket..."
38
+ puts "-----------------"
39
+ pp connection.get_bucket("fog-smoke-test").json
40
+
41
+ puts
42
+ puts "Put a test file..."
43
+ puts "---------------"
44
+ pp connection.put_object("fog-smoke-test", "my file", "THISISATESTFILE").json
45
+
46
+ puts
47
+ puts "Get the test file..."
48
+ puts "---------------"
49
+ p connection.get_object("fog-smoke-test", "my file")
50
+
51
+ puts
52
+ puts "Head file..."
53
+ puts "---------------"
54
+ pp connection.head_object("fog-smoke-test", "my file").headers
55
+
56
+ puts
57
+ puts "Object URL..."
58
+ puts "---------------"
59
+ p connection.get_object_url("fog-smoke-test", "my file")
60
+
61
+ puts
62
+ puts "Delete the test file..."
63
+ puts "---------------"
64
+ pp connection.delete_object("fog-smoke-test", "my file").json
65
+
66
+ puts
67
+ puts "Delete the bucket..."
68
+ puts "------------------"
69
+ pp connection.delete_bucket("fog-smoke-test").json
70
+ puts
data/fog-backblaze.gemspec ADDED
@@ -0,0 +1,21 @@
1
+ # coding: utf-8
2
+
3
+ require_relative 'lib/fog/backblaze/version'
4
+
5
+ Gem::Specification.new do |spec|
6
+ spec.name = "fog-backblaze"
7
+ spec.version = Fog::Backblaze::VERSION
8
+ spec.authors = ["Pavel Evstigneev"]
9
+ spec.email = ["pavel.evst@gmail.com"]
10
+
11
+ spec.summary = "Module for the 'fog' gem to support Blackblade B2 stogate."
12
+ spec.description = "Blackblade B2 stogate client for 'fog' gem, can be used for working with files and buckets. E.g. carrierwave uploads"
13
+ spec.homepage = "https://github.com/fog/fog-backblaze"
14
+
15
+ spec.files = `git ls-files -z`.split("\x0").reject do |f|
16
+ f.match(%r{^(test|spec|features)/})
17
+ end
18
+ spec.require_paths = ["lib"]
19
+
20
+ spec.add_dependency "fog-core", ">= 1.40", "<3"
21
+ end
data/lib/fog/backblaze.rb ADDED
@@ -0,0 +1,16 @@
1
+ require_relative 'backblaze/version'
2
+ require_relative 'backblaze/token_cache'
3
+ require_relative 'backblaze/json_response'
4
+
5
+ require 'fog/core'
6
+ require 'json'
7
+
8
+ #require_relative 'backblaze/storage'
9
+
10
+ module Fog
11
+ module Backblaze
12
+ extend Fog::Provider
13
+ service(:storage, "Storage")
14
+
15
+ end
16
+ end
data/lib/fog/backblaze/json_response.rb ADDED
@@ -0,0 +1,25 @@
1
+ module Fog
2
+ module Backblaze
3
+ module JSONResponse
4
+
5
+ attr_writer :json
6
+
7
+ def raw_body
8
+ @body
9
+ end
10
+
11
+ def json
12
+ @json ||= JSON.parse(raw_body)
13
+ end
14
+
15
+ def assign_json_body!
16
+ self.body = json
17
+ end
18
+
19
+ def json_response?
20
+ headers['Content-Type'].to_s.start_with?("application/json")
21
+ end
22
+
23
+ end
24
+ end
25
+ end
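The service layer (`storage.rb`, later in this diff) extends each Excon response with this mixin so callers can read parsed JSON lazily. A standalone sketch of the same pattern; the require path is an assumption, and the unauthenticated call is used only because B2 returns a JSON error body even without credentials:

```ruby
require 'excon'
require 'json'
require_relative 'lib/fog/backblaze/json_response'  # path relative to the gem root (assumption)

response = Excon.get("https://api.backblazeb2.com/b2api/v1/b2_authorize_account")
response.extend(Fog::Backblaze::JSONResponse)

if response.json_response?
  puts response.json['code']   # parses the raw body once and memoizes it
  response.assign_json_body!   # replaces response.body with the parsed hash
end
```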
data/lib/fog/backblaze/models/directories.rb ADDED
@@ -0,0 +1,15 @@
1
+ class Fog::Storage::Backblaze::Directories < Fog::Collection
2
+ model Fog::Storage::Backblaze::Directory
3
+
4
+ def all
5
+ data = service.list_buckets
6
+ load(data.json['buckets'])
7
+ end
8
+
9
+ def get(name)
10
+ list_response = service.list_buckets
11
+ bucket = list_response.json['buckets'].detect {|bucket| bucket['bucketName'] == name }
12
+ return new(bucket) if bucket
13
+ end
14
+
15
+ end
data/lib/fog/backblaze/models/directory.rb ADDED
@@ -0,0 +1,37 @@
1
+ class Fog::Storage::Backblaze::Directory < Fog::Model
2
+ identity :key, aliases: %w(bucketName)
3
+
4
+ attribute :bucket_id, aliases: 'bucketId'
5
+ attribute :bucket_info, aliases: 'bucketInfo'
6
+ attribute :bucket_type, aliases: 'bucketType'
7
+
8
+ def destroy
9
+ requires :key
10
+ service.delete_bucket(key)
11
+ true
12
+ #rescue Fog::Errors::NotFound
13
+ # false
14
+ end
15
+
16
+ def save
17
+ requires :key
18
+ options = {}
19
+
20
+ response = service.put_bucket(key, options)
21
+
22
+ attributes[:bucket_id] = response.json['bucketId']
23
+ attributes[:bucket_type] = response.json['bucketType']
24
+ attributes[:bucket_info] = response.json['bucketInfo']
25
+
26
+ true
27
+ end
28
+
29
+ def files
30
+ @files ||= Fog::Storage::Backblaze::Files.new(directory: self, service: service)
31
+ end
32
+
33
+ def public?
34
+ attributes[:bucket_type] == "allPublic"
35
+ end
36
+
37
+ end
data/lib/fog/backblaze/models/file.rb ADDED
@@ -0,0 +1,52 @@
1
+ class Fog::Storage::Backblaze::File < Fog::Model
2
+
3
+ identity :file_name, aliases: %w{fileName key}
4
+
5
+ attribute :content_length, aliases: 'contentLength'
6
+ attribute :content_type, aliases: 'contentType'
7
+ attribute :file_id, aliases: 'fileId'
8
+ attribute :file_info, aliases: 'fileInfo'
9
+ attribute :upload_timestamp, aliases: 'uploadTimestamp'
10
+
11
+ attr_accessor :directory
12
+
13
+ # TODO: read content from cloud on demand
14
+ def body
15
+ attributes[:body] #||= file_id && (file = collection.get(identity)) ? file.body : ""
16
+ end
17
+
18
+ def body=(new_body)
19
+ attributes[:body] = new_body
20
+ end
21
+
22
+ alias_method :key, :file_name
23
+ alias_method :key=, :file_name=
24
+
25
+ def save(options = {})
26
+ requires :body, :directory, :key
27
+
28
+ options[:content_type] = content_type if content_type
29
+
30
+ data = service.put_object(directory.key, key, body, options)
31
+
32
+ merge_attributes(data.headers.reject { |key, _value| %w(contentLength contentType).include?(key) })
33
+
34
+ self.content_length = Fog::Storage.get_body_size(body)
35
+ self.content_type ||= Fog::Storage.get_content_type(body)
36
+
37
+ true
38
+ end
39
+
40
+ def public_url
41
+ requires :directory, :key
42
+
43
+ service.get_object_url(directory.key, key)
44
+ end
45
+
46
+ # TODO
47
+ #def url(expires)
48
+ # requires :key
49
+ # collection.get_https_url(key, expires)
50
+ #end
51
+
52
+ end
data/lib/fog/backblaze/models/files.rb ADDED
@@ -0,0 +1,65 @@
1
+ class Fog::Storage::Backblaze::Files < Fog::Collection
2
+ model Fog::Storage::Backblaze::File
3
+
4
+ attribute :directory
5
+ #attribute :common_prefixes, :aliases => "CommonPrefixes"
6
+ #attribute :delimiter, :aliases => "Delimiter"
7
+ #attribute :page_token, :aliases => %w(pageToken page_token)
8
+ #attribute :max_results, :aliases => ["MaxKeys", "max-keys"]
9
+ #attribute :prefix, :aliases => "Prefix"
10
+
11
+ model Fog::Storage::Backblaze::File
12
+
13
+ def all(options = {})
14
+ requires :directory
15
+
16
+ body = service.list_objects(directory.key, options).body
17
+ load(body["files"] || [])
18
+ end
19
+
20
+ def get(file_name)
21
+ requires :directory
22
+
23
+ file_response = service.get_object(directory.key, file_name)
24
+ file_data = _headers_to_attrs(file_response)
25
+
26
+ new(file_data)
27
+ end
28
+
29
+ def new(attributes = {})
30
+ requires :directory
31
+ super({directory: directory}.merge!(attributes))
32
+ end
33
+
34
+ # TODO: download url for private buckets
35
+ def get_https_url(file_name, expires, options)
36
+ requires :directory
37
+ service.get_object_https_url(directory.key, file_name, expires, options)
38
+ end
39
+
40
+ # TODO: download url for private buckets
41
+ def head_url(file_name, expires, options = {})
42
+ requires :directory
43
+ service.get_object_https_url(directory.key, file_name, expires, options)
44
+ end
45
+
46
+ def head(file_name, options = {})
47
+ requires :directory
48
+ data = service.head_object(directory.key, file_name, options)
49
+ file_data = _headers_to_attrs(data)
50
+ new(file_data)
51
+ rescue Excon::Errors::NotFound
52
+ nil
53
+ end
54
+
55
+ def _headers_to_attrs(file_response)
56
+ {
57
+ fileName: file_response.headers['x-bz-file-name'],
58
+ fileId: file_response.headers['x-bz-file-id'],
59
+ uploadTimestamp: file_response.headers['X-Bz-Upload-Timestamp'],
60
+ contentType: file_response.headers['Content-Type'],
61
+ contentLength: file_response.headers['Content-Length']
62
+ }
63
+ end
64
+
65
+ end
data/lib/fog/backblaze/storage.rb ADDED
@@ -0,0 +1,426 @@
1
+ require 'json'
2
+ require 'digest'
3
+ require 'cgi'
+ require 'base64' # Base64 is used in Real#auth_response
4
+
5
+ class Fog::Storage::Backblaze < Fog::Service
6
+ requires :b2_account_id, :b2_account_token
7
+ recognizes :b2_bucket_name, :b2_bucket_id, :token_cache, :logger
8
+
9
+ model_path 'fog/backblaze/models'
10
+ model :directory
11
+ collection :directories
12
+ model :file
13
+ collection :files
14
+
15
+ class Mock
16
+ #include Integrity
17
+
18
+ def self.data
19
+ @data ||= Hash.new do |hash, key|
20
+ hash[key] = {}
21
+ end
22
+ end
23
+
24
+ def self.reset
25
+ @data = nil
26
+ end
27
+
28
+ def initialize(options={})
29
+ @b2_account_id = options[:b2_account_id]
30
+ @b2_account_token = options[:b2_account_token]
31
+ @path = '/v1/AUTH_1234'
32
+ @containers = {}
33
+ end
34
+
35
+ def data
36
+ self.class.data[@b2_account_id]
37
+ end
38
+
39
+ def reset_data
40
+ self.class.data.delete(@b2_account_id)
41
+ end
42
+
43
+ def change_account(account)
44
+ @original_path ||= @path
45
+ version_string = @original_path.split('/')[1]
46
+ @path = "/#{version_string}/#{account}"
47
+ end
48
+
49
+ def reset_account_name
50
+ @path = @original_path
51
+ end
52
+
53
+ end
54
+
55
+ class Real
56
+ attr_reader :token_cache, :options
57
+
58
+ def initialize(options = {})
59
+ @options = options
60
+ @logger = @options[:logger] || begin
61
+ require 'logger'
62
+ Logger.new(IO::NULL)
63
+ end
64
+
65
+ @token_cache = if options[:token_cache].nil? || options[:token_cache] == :memory
66
+ Fog::Backblaze::TokenCache.new
67
+ elsif options[:token_cache] === false
68
+ Fog::Backblaze::TokenCache::NullTokenCache.new
69
+ elsif options[:token_cache].is_a?(Fog::Backblaze::TokenCache)
70
+ options[:token_cache]
71
+ else
72
+ Fog::Backblaze::TokenCache::FileTokenCache.new(options[:token_cache])
73
+ end
74
+ end
75
+
76
+ def logger
77
+ @logger
78
+ end
79
+
80
+ ## Buckets
81
+
82
+ def put_bucket(key, extra_options = {})
83
+ options = {
84
+ accountId: @options[:b2_account_id],
85
+ bucketType: extra_options.delete(:public) ? 'allPublic' : 'allPrivate',
86
+ bucketName: key,
87
+ }.merge(extra_options)
88
+
89
+ response = b2_command(:b2_create_bucket, body: options)
90
+
91
+ if response.status >= 400
92
+ raise Fog::Errors::Error, "Failed put_bucket, status = #{response.status} #{response.body}"
93
+ end
94
+
95
+ if cached = @token_cache.buckets
96
+ @token_cache.buckets = cached.merge(key => response.json)
97
+ else
98
+ @token_cache.buckets = {key => response.json}
99
+ end
100
+
101
+ response
102
+ end
103
+
104
+ def list_buckets
105
+ response = b2_command(:b2_list_buckets, body: {accountId: @options[:b2_account_id]})
106
+
107
+ response
108
+ end
109
+
110
+ def get_bucket(bucket_name)
111
+ reponse = list_buckets
112
+ bucket = reponse.json['buckets'].detect do |bucket|
113
+ bucket['bucketName'] == bucket_name
114
+ end
115
+
116
+ unless bucket
117
+ raise Fog::Errors::NotFound, "No bucket with name: #{bucket_name}, " +
118
+ "found: #{reponse.json['buckets'].map {|b| b['bucketName']}.join(", ")}"
119
+ end
120
+
121
+ response.body = bucket
121
+ response.json = bucket
122
+ return response
124
+ end
125
+
126
+ def delete_bucket(bucket_name, options = {})
127
+ bucket_id = _get_bucket_id(bucket_name)
128
+
129
+ unless bucket_id
130
+ raise Fog::Errors::NotFound, "Can not bucket #{bucket_name}"
131
+ end
132
+
133
+ response = b2_command(:b2_delete_bucket,
134
+ body: {
135
+ bucketId: bucket_id,
136
+ accountId: @options[:b2_account_id]
137
+ }
138
+ )
139
+
140
+ if !options[:is_retrying]
141
+ if response.status == 400 && response.json['message'] =~ /Bucket .+ does not exist/
142
+ logger.info("Try drop cache and try again")
143
+ @token_cache.buckets = nil
144
+ return delete_bucket(bucket_name, is_retrying: true)
145
+ end
146
+ end
147
+
148
+ if response.status >= 400
149
+ raise Fog::Errors::Error, "Failed delete_bucket, status = #{response.status} #{response.body}"
150
+ end
151
+
152
+ if cached = @token_cache.buckets
153
+ cached.delete(bucket_name)
154
+ @token_cache.buckets = cached
155
+ end
156
+
157
+ response
158
+ end
159
+
160
+ ## Objects
161
+
162
+ def list_objects(bucket_name, options = {})
163
+ bucket_id = _get_bucket_id(bucket_name)
164
+
165
+ unless bucket_id
166
+ raise Fog::Errors::NotFound, "Can not bucket #{bucket_name}"
167
+ end
168
+
169
+ b2_command(:b2_list_file_names, body: {
170
+ bucketId: bucket_id,
171
+ maxFileCount: 10_000
172
+ }.merge(options))
173
+ end
174
+
175
+ def head_object(bucket_name, file_path)
176
+ file_url = get_object_url(bucket_name, file_path)
177
+
178
+ result = b2_command(nil,
179
+ method: :head,
180
+ url: file_url
181
+ )
182
+
183
+ if result.status == 404
184
+ raise Fog::Errors::NotFound, "Can not find #{file_path.inspect} in bucket #{bucket_name}"
185
+ end
186
+
187
+ if result.status >= 400
188
+ raise Fog::Errors::NotFound, "Backblaze respond with status = #{result.status} - #{result.reason_phrase}"
189
+ end
190
+
191
+ result
192
+ end
193
+
194
+ # TODO: handle options
195
+ def put_object(bucket_name, file_path, content, options = {})
196
+ upload_url = @token_cache.fetch("upload_url/#{bucket_name}") do
197
+ bucket_id = _get_bucket_id(bucket_name)
198
+ unless bucket_id
199
+ raise Fog::Errors::NotFound, "Can not find bucket #{bucket_name.inspect}"
200
+ end
201
+ result = b2_command(:b2_get_upload_url, body: {bucketId: _get_bucket_id(bucket_name)})
202
+ result.json
203
+ end
204
+
205
+ extra_headers = {}
206
+ if options[:content_type]
207
+ extra_headers['Content-Type'] = options[:content_type]
208
+ end
209
+ if options[:last_modified]
210
+ extra_headers['X-Bz-Info-src_last_modified_millis'] = options[:last_modified]
211
+ end
212
+ if options[:content_disposition]
213
+ extra_headers['X-Bz-Info-b2-content-disposition'] = options[:content_disposition]
214
+ end
215
+
216
+ response = b2_command(nil,
217
+ url: upload_url['uploadUrl'],
218
+ body: content,
219
+ headers: {
220
+ 'Authorization': upload_url['authorizationToken'],
221
+ 'Content-Type': 'b2/x-auto',
222
+ 'X-Bz-File-Name': "#{_esc_file(file_path)}",
223
+ 'X-Bz-Content-Sha1': Digest::SHA1.hexdigest(content)
224
+ }.merge(extra_headers)
225
+ )
226
+
227
+ if response.json['fileId'] == nil
228
+ raise Fog::Errors::Error, "Failed put_object, status = #{response.status} #{response.body}"
229
+ end
230
+
231
+ response
232
+ end
233
+
234
+ def get_object_url(bucket_name, file_path)
235
+ "#{auth_response['downloadUrl']}/file/#{CGI.escape(bucket_name)}/#{_esc_file(file_path)}"
236
+ end
237
+
238
+ alias_method :get_object_https_url, :get_object_url
239
+
240
+ def get_public_object_url(bucket_name, file_path, options = {})
241
+ bucket_id = _get_bucket_id(bucket_name)
242
+
243
+ unless bucket_id
244
+ raise Fog::Errors::NotFound, "Can not bucket #{bucket_name}"
245
+ end
246
+
247
+ result = b2_command(:b2_get_download_authorization, body: {
248
+ bucketId: bucket_id,
249
+ fileNamePrefix: file_path,
250
+ validDurationInSeconds: 604800
251
+ }.merge(options))
252
+
253
+ if result.status == 404
254
+ raise Fog::Errors::NotFound, "Can not find #{file_path.inspect} in bucket #{bucket_name}"
255
+ end
256
+
257
+ if result.status >= 400
258
+ raise Fog::Errors::NotFound, "Backblaze respond with status = #{result.status} - #{result.reason_phrase}"
259
+ end
260
+
261
+ "#{get_object_url(bucket_name, file_path)}?Authorization=#{result.json['authorizationToken']}"
262
+ end
263
+
264
+ def get_object(bucket_name, file_name)
265
+ file_url = get_object_url(bucket_name, file_name)
266
+
267
+ result = b2_command(nil,
268
+ method: :get,
269
+ url: file_url
270
+ )
271
+
272
+ if result.status == 404
273
+ raise Fog::Errors::NotFound, "Can not find #{file_name.inspect} in bucket #{bucket_name}"
274
+ end
275
+
276
+ return result
277
+ end
278
+
279
+ def delete_object(bucket_name, file_name)
280
+ version_ids = _get_object_version_ids(bucket_name, file_name)
281
+
282
+ if version_ids.size == 0
283
+ raise Fog::Errors::NotFound, "Can not find #{file_name} in in bucket #{bucket_name}"
284
+ end
285
+
286
+ logger.info("Deleting #{version_ids.size} versions of #{file_name}")
287
+
288
+ last_response = nil
289
+ version_ids.each do |version_id|
290
+ last_response = b2_command(:b2_delete_file_version, body: {
291
+ fileName: file_name,
292
+ fileId: version_id
293
+ })
294
+ end
295
+
296
+ last_response
297
+ end
298
+
299
+ def _get_object_version_ids(bucket_name, file_name)
300
+ response = b2_command(:b2_list_file_versions,
301
+ body: {
302
+ startFileName: file_name,
303
+ prefix: file_name,
304
+ bucketId: _get_bucket_id(bucket_name),
305
+ maxFileCount: 1000
306
+ }
307
+ )
308
+
309
+ if response.json['files']
310
+ version_ids = []
311
+ response.json['files'].map do |file_version|
312
+ version_ids << file_version['fileId'] if file_version['fileName'] == file_name
313
+ end
314
+ version_ids
315
+ else
316
+ []
317
+ end
318
+ end
319
+
320
+ def _get_bucket_id(bucket_name)
321
+ if @options[:b2_bucket_name] == bucket_name && @options[:b2_bucket_id]
322
+ return @options[:b2_bucket_id]
323
+ else
324
+ cached = @token_cache && @token_cache.buckets
325
+
326
+ if cached && cached[bucket_name]
327
+ return cached[bucket_name]['bucketId']
328
+ else
329
+ fetched = _cached_buckets_hash(force_fetch: !!cached)
330
+ return fetched[bucket_name] && fetched[bucket_name]['bucketId']
331
+ end
332
+ end
333
+ end
334
+
335
+ def _cached_buckets_hash(force_fetch: false)
336
+
337
+ if !force_fetch && cached = @token_cache.buckets
338
+ return cached
339
+ end
340
+
341
+ buckets_hash = {}
342
+ list_buckets.json['buckets'].each do |bucket|
343
+ buckets_hash[bucket['bucketName']] = bucket
344
+ end
345
+
346
+ @token_cache.buckets = buckets_hash
347
+
348
+ buckets_hash
349
+ end
350
+
351
+ def auth_response
352
+ #return @auth_response.json if @auth_response
353
+
354
+ if cached = @token_cache.auth_response
355
+ logger.info("get token from cache")
356
+ return cached
357
+ end
358
+
359
+ @auth_response = json_req(:get, "https://api.backblazeb2.com/b2api/v1/b2_authorize_account",
360
+ headers: {
361
+ "Authorization" => "Basic " + Base64.strict_encode64("#{@options[:b2_account_id]}:#{@options[:b2_account_token]}")
362
+ },
363
+ persistent: false
364
+ )
365
+
366
+ if @auth_response.status >= 400
367
+ raise Fog::Errors::Error, "Authentication error: #{@auth_response.json['message']} (status = #{@auth_response.status})\n#{@auth_response.body}"
368
+ end
369
+
370
+ @token_cache.auth_response = @auth_response.json
371
+
372
+ @auth_response.json
373
+ end
374
+
375
+ def b2_command(command, options = {})
376
+ auth_response = self.auth_response
377
+ options[:headers] ||= {}
378
+ options[:headers]['Authorization'] ||= auth_response['authorizationToken']
379
+
380
+ if options[:body] && !options[:body].is_a?(String)
381
+ options[:body] = JSON.generate(options[:body])
382
+ end
383
+
384
+ request_url = options.delete(:url) || "#{auth_response['apiUrl']}/b2api/v1/#{command}"
385
+
386
+ #pp [:b2_command, request_url, options]
387
+
388
+ json_req(options.delete(:method) || :post, request_url, options)
389
+ end
390
+
391
+ def json_req(method, url, options = {})
392
+ start_time = Time.now.to_f
393
+ logger.info("Req #{method.to_s.upcase} #{url}")
394
+ logger.debug(options.to_s)
395
+
396
+ if !options.has_key?(:persistent) || options[:persistent] == true
397
+ @connections ||= {}
398
+ full_path = [URI.parse(url).request_uri, URI.parse(url).fragment].compact.join("#")
399
+ host_url = url.sub(full_path, "")
400
+ connection = @connections[host_url] ||= Excon.new(host_url, persistent: true)
401
+ http_response = connection.send(method, options.merge(path: full_path, idempotent: true))
402
+ else
403
+ http_response = Excon.send(method, url, options)
404
+ end
405
+
406
+ http_response.extend(Fog::Backblaze::JSONResponse)
407
+ http_response.assign_json_body! if http_response.json_response?
408
+
409
+ http_response
410
+ ensure
411
+ status = http_response && http_response.status
412
+ logger.info("Done #{method.to_s.upcase} #{url} = #{status} (#{(Time.now.to_f - start_time).round(3)} sec)")
413
+ logger.debug(http_response.headers) if http_response
414
+ logger.debug(http_response.body) if http_response
415
+ end
416
+
417
+ def reset_token_cache
418
+ @token_cache.reset
419
+ end
420
+
421
+ def _esc_file(file_name)
422
+ CGI.escape(file_name).gsub('%2F', '/')
423
+ end
424
+ end
425
+ end
426
+
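Two request-level calls defined above are not covered by the README or examples/example.rb: uploading with an explicit Content-Type and building a signed URL for a private bucket. A sketch of both; the bucket name, file path, and cache file are illustrative:

```ruby
require "fog/backblaze"

connection = Fog::Storage.new(
  provider: 'backblaze',
  b2_account_id: ENV['B2_ACCOUNT_ID'],
  b2_account_token: ENV['B2_ACCOUNT_TOKEN'],
  token_cache: 'tmp/b2_tokens.json'   # file-backed token cache; tmp/ must already exist
)

# :content_type is passed through as the Content-Type header by put_object
connection.put_object('app-private', 'reports/2018.csv', "a,b\n1,2\n",
                      content_type: 'text/csv')

# Signed URL via b2_get_download_authorization (valid for 7 days, as hard-coded above)
puts connection.get_public_object_url('app-private', 'reports/2018.csv')
```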
data/lib/fog/backblaze/token_cache.rb ADDED
@@ -0,0 +1,134 @@
1
+ # Each request must have an authentication header, received from b2_authorize_account
2
+ # The authentication token is valid for 24 hours, same as uploadUrl. To make things faster we keep it in a cache
3
+ #
4
+ # Available cache stores: file, memory, null
5
+ #
6
+ # To create your own cache store (backed by redis, for example):
7
+ #
8
+ # class RedisTokenCache < Fog::Backblaze::TokenCache
9
+ # def initialize(redis_url)
10
+ # @redis = Redis.new(redis_url)
11
+ # super()
12
+ # end
13
+ # def load_data
14
+ # raw_data = @redis.get("b2_token_cache")
15
+ # raw_data ? JSON.parse(raw_data) : {}
16
+ # end
17
+ # def save_data
18
+ # @redis.set("b2_token_cache", JSON.pretty_generate(@data))
19
+ # end
20
+ # end
21
+ #
22
+ # Fog::Storage.new(provider: 'backblaze', ..., token_cache: RedisTokenCache.new)
23
+ #
24
+
25
+ module Fog
26
+ module Backblaze
27
+ end
28
+ end
29
+
30
+ class Fog::Backblaze::TokenCache
31
+
32
+ def initialize
33
+ @data = load_data || {}
34
+ end
35
+
36
+ def load_data
37
+ end
38
+
39
+ def save_data
40
+ end
41
+
42
+ TTLS = {
43
+ auth_response: 3600 * 24,
44
+ buckets: 3600 * 24,
45
+ upload_url: 3600 * 24
46
+ }
47
+
48
+ def fetch(field)
49
+ if result = access_part(field)
50
+ result
51
+ else
52
+ result = yield
53
+ write_part(field, result, TTLS[field])
54
+ save_data
55
+ result
56
+ end
57
+ end
58
+
59
+ def auth_response
60
+ access_part(:auth_response)
61
+ end
62
+
63
+ def auth_response=(value)
64
+ write_part(:auth_response, value, 3600 * 24)
65
+ save_data
66
+ end
67
+
68
+ def buckets
69
+ access_part(:buckets)
70
+ end
71
+
72
+ def buckets=(value)
73
+ write_part(:buckets, value, 3600 * 24)
74
+ save_data
75
+ end
76
+
77
+ def upload_url
78
+
79
+ end
80
+
81
+ def access_part(name)
82
+ name = name.to_s
83
+ if @data[name] && ::DateTime.parse(@data[name]['expires_at']).to_time > ::Time.now
84
+ @data[name]['value']
85
+ end
86
+ end
87
+
88
+ def write_part(name, value, ttl = 3600)
89
+ ttl = 3600 if ttl.nil?
90
+ name = name.to_s
91
+ if value.nil?
92
+ @data.delete(name)
93
+ else
94
+ @data[name] = {
95
+ 'value' => value,
96
+ 'expires_at' => ::Time.at(::Time.now + ttl - 1).to_s
97
+ }
98
+ end
99
+ end
100
+
101
+ def reset
102
+ @data = {}
103
+ save_data
104
+ end
105
+
106
+ # stored in local file
107
+ class FileTokenCache < Fog::Backblaze::TokenCache
108
+
109
+ def initialize(file)
110
+ @file = file
111
+ super()
112
+ end
113
+
114
+ def load_data
115
+ if File.exist?(@file)
116
+ JSON.parse(File.open(@file, 'rb', &:read))
117
+ else
118
+ {}
119
+ end
120
+ end
121
+
122
+ def save_data
123
+ File.open(@file, 'wb') do |f|
124
+ f.write(JSON.pretty_generate(@data) + "\n")
125
+ end
126
+ end
127
+ end
128
+
129
+ # black hole, always clean cache
130
+ class NullTokenCache < Fog::Backblaze::TokenCache
131
+ def write_part(name, value, ttl = 3600)
132
+ end
133
+ end
134
+ end
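The cache can also be exercised directly, which makes the TTL mechanics above easier to follow. A small sketch against the file-backed store; the file name and cached values are illustrative:

```ruby
require 'json'
require 'date'
require_relative 'lib/fog/backblaze/token_cache'  # path relative to the gem root (assumption)

cache = Fog::Backblaze::TokenCache::FileTokenCache.new('b2_tokens.json')

# fetch runs the block only when there is no unexpired entry, then persists the result
upload = cache.fetch('upload_url/app-test') do
  { 'uploadUrl' => 'https://pod-000.example/upload', 'authorizationToken' => 'token' }
end
puts upload['uploadUrl']

# buckets and auth_response have dedicated accessors with a 24-hour TTL
cache.buckets = { 'app-test' => { 'bucketId' => '6ec42006ec42006ec42' } }
puts cache.buckets['app-test']['bucketId']

cache.reset   # drops all entries and rewrites the backing file
```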
data/lib/fog/backblaze/version.rb ADDED
@@ -0,0 +1,5 @@
1
+ module Fog
2
+ module Backblaze
3
+ VERSION = "0.1.1"
4
+ end
5
+ end
metadata ADDED
@@ -0,0 +1,84 @@
1
+ --- !ruby/object:Gem::Specification
2
+ name: fog-backblaze
3
+ version: !ruby/object:Gem::Version
4
+ version: 0.1.1
5
+ platform: ruby
6
+ authors:
7
+ - Pavel Evstigneev
8
+ autorequire:
9
+ bindir: bin
10
+ cert_chain: []
11
+ date: 2018-03-27 00:00:00.000000000 Z
12
+ dependencies:
13
+ - !ruby/object:Gem::Dependency
14
+ name: fog-core
15
+ requirement: !ruby/object:Gem::Requirement
16
+ requirements:
17
+ - - ">="
18
+ - !ruby/object:Gem::Version
19
+ version: '1.40'
20
+ - - "<"
21
+ - !ruby/object:Gem::Version
22
+ version: '3'
23
+ type: :runtime
24
+ prerelease: false
25
+ version_requirements: !ruby/object:Gem::Requirement
26
+ requirements:
27
+ - - ">="
28
+ - !ruby/object:Gem::Version
29
+ version: '1.40'
30
+ - - "<"
31
+ - !ruby/object:Gem::Version
32
+ version: '3'
33
+ description: Backblaze B2 storage client for the 'fog' gem; can be used for working with
34
+ files and buckets, e.g. carrierwave uploads
35
+ email:
36
+ - pavel.evst@gmail.com
37
+ executables: []
38
+ extensions: []
39
+ extra_rdoc_files: []
40
+ files:
41
+ - ".gitignore"
42
+ - ".travis.yml"
43
+ - Gemfile
44
+ - Gemfile.lock
45
+ - LICENSE
46
+ - README.md
47
+ - Rakefile
48
+ - bin/console
49
+ - bin/setup
50
+ - examples/example.rb
51
+ - fog-backblaze.gemspec
52
+ - lib/fog/backblaze.rb
53
+ - lib/fog/backblaze/json_response.rb
54
+ - lib/fog/backblaze/models/directories.rb
55
+ - lib/fog/backblaze/models/directory.rb
56
+ - lib/fog/backblaze/models/file.rb
57
+ - lib/fog/backblaze/models/files.rb
58
+ - lib/fog/backblaze/storage.rb
59
+ - lib/fog/backblaze/token_cache.rb
60
+ - lib/fog/backblaze/version.rb
61
+ homepage: https://github.com/fog/fog-backblaze
62
+ licenses: []
63
+ metadata: {}
64
+ post_install_message:
65
+ rdoc_options: []
66
+ require_paths:
67
+ - lib
68
+ required_ruby_version: !ruby/object:Gem::Requirement
69
+ requirements:
70
+ - - ">="
71
+ - !ruby/object:Gem::Version
72
+ version: '0'
73
+ required_rubygems_version: !ruby/object:Gem::Requirement
74
+ requirements:
75
+ - - ">="
76
+ - !ruby/object:Gem::Version
77
+ version: '0'
78
+ requirements: []
79
+ rubyforge_project:
80
+ rubygems_version: 2.6.8
81
+ signing_key:
82
+ specification_version: 4
83
+ summary: Module for the 'fog' gem to support Backblaze B2 storage.
84
+ test_files: []