bkblz 0.1.12 → 0.1.13
- checksums.yaml +5 -5
- data/README.rb +6 -0
- data/lib/bkblz/config.rb +3 -1
- data/lib/bkblz/task/upload_file.rb +62 -10
- data/lib/bkblz/v1/all.rb +8 -4
- data/lib/bkblz/v1/finish_large_file.rb +30 -0
- data/lib/bkblz/v1/get_upload_part_url.rb +22 -0
- data/lib/bkblz/v1/models.rb +10 -2
- data/lib/bkblz/v1/start_large_file.rb +40 -0
- data/lib/bkblz/v1/upload_part.rb +53 -0
- data/lib/bkblz/version.rb +1 -1
- metadata +7 -3
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
-
-  metadata.gz:
-  data.tar.gz:
+SHA256:
+  metadata.gz: 7f294d136a3d0f442f0b416e5f7b6559d675f6f0ab041eee9429bdcbece338eb
+  data.tar.gz: caac5d40546eb5de9e41ebb1e63ceece8a91dd33d94f7a6c995fa6dc8bd2cf5c
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: d20fe9ccad91955038065f733f5f503a0552f4397c92256ff137d8793d0b992f6e30a8cdb0085906113f5588cfe2f840f8a887055f39714baecc45f20b346726
+  data.tar.gz: a424d090a142412fde903cc7a1dc510fd705c7a88767e279e70d1d10b428231ccbc11155c5ca7b72d960a5c12ada2f88776a0765c9fe93067373cf493d77fdbd
data/README.rb
CHANGED
@@ -16,10 +16,14 @@ Currently the gem supports the following V1 API calls:
 * b2_delete_bucket
 * b2_delete_file_version
 * b2_get_file_info
+* b2_get_upload_part_url
+* b2_finish_large_file
 * b2_list_buckets
 * b2_list_file_names
 * b2_list_file_versions
+* b2_start_large_file
 * b2_upload_file
+* b2_upload_part
 * b2_download_file_by_id
 * b2_download_file_by_name
 

@@ -193,6 +197,8 @@ def run_readme
   byte_range_download = session.send(
     Bkblz::V1::DownloadFileByNameRequest.new bucket, file_name, bytes).to_model
   Bkblz.log.info "file bytes: #{byte_range_download.body}"
+
+  # TODO: add examples for uploading large files by parts... see lib/bkblz/task/upload_file
 rescue => e
   Bkblz.log.error "there was an error: #{e}"
   Bkblz.log.error e.backtrace.join "\n"
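The TODO added above points at lib/bkblz/task/upload_file for the new multi-part flow. As a stopgap reference, here is a minimal sketch of that flow using the V1 requests introduced in this release; it assumes session and bucket are set up exactly as in the earlier README examples, and the file path and chunk size are illustrative, not part of the gem.

# Sketch only: mirrors the flow implemented in lib/bkblz/task/upload_file.
chunk_size = 100_000_000   # same value as the default config.large_file_max_chunk_size
file_path  = "big-file.bin"
num_chunks = (File.size(file_path) / chunk_size.to_f).ceil

file_id = session.send(Bkblz::V1::StartLargeFileRequest.new(
  bucket.bucket_id, File.basename(file_path), File.mtime(file_path).to_i * 1000)).to_model.file_id

part_auth = session.send(Bkblz::V1::GetUploadPartUrlRequest.new(file_id)).to_model

file_io = File.new(file_path, "rb")
part_infos = (0...num_chunks).map do |chunk_i|
  session.send(Bkblz::V1::UploadPartRequest.new(
    part_auth, file_io, chunk_i, chunk_size)).to_model
end

large_file = session.send(
  Bkblz::V1::FinishLargeFileRequest.new(file_id, part_infos)).to_model
Bkblz.log.info "uploaded large file: #{large_file.file_name}"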
data/lib/bkblz/config.rb
CHANGED
@@ -9,7 +9,9 @@ module Bkblz
 
       :log_device => :stderr, # [:stdout, :stderr, :devnull, path, fd]
       :log_level => :warn, # [:debug, :info, :warn, :error, :fatal, (-6..-1)]
-      :log_colorize => true
+      :log_colorize => true,
+
+      :large_file_max_chunk_size => 1e8, # 100MB
     }.freeze
 
     attr_reader *CONFIG_VARS.keys, :config_map
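For orientation, the defaults above correspond to a plain config map like the following (an illustrative sketch, not a complete option list); the new :large_file_max_chunk_size key sets the threshold, in bytes, above which the upload task switches to the multi-part path.

# Illustrative config map mirroring the CONFIG_VARS defaults shown above.
config_map = {
  :log_device => :stderr,
  :log_level => :warn,
  :log_colorize => true,
  :large_file_max_chunk_size => 1e8  # 100MB; also used as the per-part chunk size
}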
data/lib/bkblz/task/upload_file.rb CHANGED

@@ -19,15 +19,6 @@ module Bkblz
       task_param :gzip
 
       def run_internal(session, params)
-        file_body = if params[:file_path]
-                      f = ::File.new(params[:file_path], "r")
-                      f.read
-                    elsif params[:file_body]
-                      params[:file_body]
-                    else
-                      raise 'missing either :file_body or :file_path param'
-                    end
-
         file_name = if params[:file_name]
                       params[:file_name]
                     elsif params[:file_path]

@@ -42,13 +33,74 @@ module Bkblz
             Time.now
           end.to_i * 1000
 
+        bucket = find_bucket_by_name session, params[:bucket_name]
+        size = file_size(params)
+
+        file_data = {
+          :file_name => file_name,
+          :file_mtime => file_mtime,
+          :file_size => size,
+          :bucket => bucket,
+        }
+
+        if size > config.large_file_max_chunk_size
+          upload_large_file session, params, **file_data
+        else
+          upload_file session, params, **file_data
+        end
+      end
+
+      private
+      def file_size(params)
+        if params[:file_path]
+          ::File.new(params[:file_path]).size
+        elsif params[:file_body]
+          params[:file_body].size
+        end
+      end
+
+      def upload_large_file(session, params, bucket:, file_name:, file_mtime:, **file_data)
+        start_large_file_info = session.send(Bkblz::V1::StartLargeFileRequest.new(
+          bucket.bucket_id, file_name, file_mtime)).to_model
+        file_id = start_large_file_info.file_id
+
+        upload_part_auth = session.send(Bkblz::V1::GetUploadPartUrlRequest.new(file_id)).to_model
+
+        actual_size = file_data[:file_size]
+        chunk_size = config.large_file_max_chunk_size
+        num_chunks = (actual_size / chunk_size.to_f).ceil
+
+        file_io = if params[:file_path]
+                    ::File.new(params[:file_path], "rb")
+                  else
+                    raise 'only file_path is supported for large file uploads'
+                  end
+
+        upload_part_infos = (0..num_chunks - 1).map do |chunk_i|
+          session.send(Bkblz::V1::UploadPartRequest.new(
+            upload_part_auth, file_io, chunk_i, chunk_size)).to_model
+        end
+
+        file_info = session.send(
+          Bkblz::V1::FinishLargeFileRequest.new(file_id, upload_part_infos)).to_model
+      end
+
+      def upload_file(session, params, bucket:, file_name:, file_mtime:, **file_data)
+        file_body = if params[:file_path]
+                      f = ::File.new(params[:file_path], "r")
+                      f.read
+                    elsif params[:file_body]
+                      params[:file_body]
+                    else
+                      raise 'missing either :file_body or :file_path param'
+                    end
+
         if params[:gzip]
           # https://ruby-doc.org/stdlib-2.4.2/libdoc/zlib/rdoc/Zlib.html#method-c-gzip
           file_body = Zlib.gzip file_body, level: Zlib::DEFAULT_COMPRESSION
           file_name << ".gz" unless file_name =~ /\.gz$/
         end
 
-        bucket = find_bucket_by_name session, params[:bucket_name]
         upload_auth = session.send(
           Bkblz::V1::GetUploadUrlRequest.new bucket.bucket_id).to_model
 
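A quick worked example of the size check and chunk arithmetic in run_internal and upload_large_file above, using the default 100MB chunk size and an illustrative 250MB file:

chunk_size = 1e8           # config.large_file_max_chunk_size
size       = 250_000_000   # file_size(params)

size > chunk_size                            # => true, so upload_large_file is taken
num_chunks = (size / chunk_size.to_f).ceil   # => 3
# UploadPartRequest then reads chunks at byte offsets 0, 100_000_000 and
# 200_000_000; the final chunk is only the remaining 50MB.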
data/lib/bkblz/v1/all.rb
CHANGED
@@ -7,13 +7,17 @@ require_relative "request"
 require_relative "error_response"
 
 require_relative "authorize_account"
-require_relative "list_buckets"
 require_relative "create_bucket"
 require_relative "delete_bucket"
+require_relative "delete_file_version"
+require_relative "download_file"
+require_relative "finish_large_file"
 require_relative "get_file_info"
+require_relative "get_upload_part_url"
 require_relative "get_upload_url"
-require_relative "
+require_relative "list_buckets"
 require_relative "list_file_names"
 require_relative "list_file_versions"
-require_relative "
-require_relative "
+require_relative "start_large_file"
+require_relative "upload_file"
+require_relative "upload_part"
data/lib/bkblz/v1/finish_large_file.rb ADDED

@@ -0,0 +1,30 @@
+require "digest/sha1"
+
+module Bkblz
+  module V1
+
+    class FinishLargeFileResponse < Response
+      response_model Model::FileInfo
+    end
+
+    class FinishLargeFileRequest < Request
+
+      response_class FinishLargeFileResponse
+      url_suffix "/b2api/v1/b2_finish_large_file"
+
+      def initialize(file_id, file_part_infos)
+        sha1_sums = file_part_infos.sort { |a, b| a.part_number <=> b.part_number }.map do |info|
+          info.content_sha1
+        end
+        @body = {
+          :file_id => file_id,
+          :part_sha1_array => sha1_sums,
+        }
+      end
+
+      def build_request(session)
+        session.create_post url(session), @body
+      end
+    end
+  end
+end
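For clarity, the body FinishLargeFileRequest posts for a three-part upload looks like the following (placeholder values); the SHA1s are ordered by part_number because of the sort above.

{
  :file_id => "example-file-id",
  :part_sha1_array => ["<sha1 of part 1>", "<sha1 of part 2>", "<sha1 of part 3>"]
}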
data/lib/bkblz/v1/get_upload_part_url.rb ADDED

@@ -0,0 +1,22 @@
+module Bkblz
+  module V1
+
+    class GetUploadPartUrlResponse < Response
+      response_model Model::UploadPartAuth
+    end
+
+    class GetUploadPartUrlRequest < Request
+
+      response_class GetUploadPartUrlResponse
+      url_suffix "/b2api/v1/b2_get_upload_part_url"
+
+      def initialize(file_id)
+        @body = {:file_id => file_id}
+      end
+
+      def build_request(session)
+        session.create_post url(session), @body
+      end
+    end
+  end
+end
data/lib/bkblz/v1/models.rb
CHANGED
@@ -15,18 +15,26 @@ module Bkblz
       :body, :content_length, :content_type, :file_id, :file_name, :sha1, :x_bz_info
     ]
 
-    # Returned by upload_file
+    # Returned by upload_file, finish_large_file, start_large_file
     FileInfo = Model.define *[
-      :account_id, :bucket_id, :content_length, :content_sha1, :content_type,
+      :account_id, :action, :bucket_id, :content_length, :content_sha1, :content_type,
       :file_id, :file_info, :file_name
     ]
 
+    # Returned by upload_file
+    FilePartInfo = Model.define *[
+      :file_id, :part_number, :content_length, :content_sha1, :upload_timestamp,
+    ]
+
     # Returned by delete_file_version
     PartialFileInfo = Model.define :file_id, :file_name
 
     # Returned by get_upload_url
     UploadAuth = Model.define :bucket_id, :upload_url, :authorization_token
 
+    # Returned by get_upload_part_url
+    UploadPartAuth = Model.define :file_id, :upload_url, :authorization_token
+
     # Possibly returned by any request
     Error = Model.define :status, :code, :message
   end
data/lib/bkblz/v1/start_large_file.rb ADDED

@@ -0,0 +1,40 @@
+require "digest/sha1"
+
+module Bkblz
+  module V1
+
+    class StartLargeFileResponse < Response
+      response_model Model::FileInfo
+    end
+
+    class StartLargeFileRequest < Request
+
+      response_class StartLargeFileResponse
+      url_suffix "/b2api/v1/b2_start_large_file"
+
+      def initialize(bucket_id, file_name, last_modified_millis=nil,
+                     file_sha=nil, content_type='b2/x-auto', **file_info)
+        # Both of the following are recommended here:
+        # https://www.backblaze.com/b2/docs/b2_start_large_file.html
+        if last_modified_millis
+          # must be a string
+          file_info[:src_last_modified_millis] = last_modified_millis.to_s
+        end
+        if file_sha
+          file_info[:large_file_sha1] = file_sha
+        end
+
+        @body = {
+          :bucket_id => bucket_id,
+          :file_name => file_name,
+          :content_type => content_type,
+          :file_info => file_info
+        }
+      end
+
+      def build_request(session)
+        session.create_post url(session), @body
+      end
+    end
+  end
+end
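As an illustration of the body built above (placeholder IDs and timestamp; note that src_last_modified_millis is stringified, as the B2 docs linked in the comments require):

Bkblz::V1::StartLargeFileRequest.new("example-bucket-id", "backups/big.tar", 1546992000000)
# builds @body as:
# {
#   :bucket_id    => "example-bucket-id",
#   :file_name    => "backups/big.tar",
#   :content_type => "b2/x-auto",
#   :file_info    => { :src_last_modified_millis => "1546992000000" }
# }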
data/lib/bkblz/v1/upload_part.rb ADDED

@@ -0,0 +1,53 @@
+require "digest/sha1"
+
+module Bkblz
+  module V1
+
+    class UploadPartResponse < Response
+      response_model Model::FilePartInfo
+    end
+
+    class UploadPartRequest < Request
+
+      response_class UploadPartResponse
+
+      REQUIRED_HEADERS = {
+        :"Authorization" => nil,
+        :"Content-Length" => nil,
+        :"Content-Type" => 'application/octet-stream',
+        :"X-Bz-Part-Number" => nil, # a value in [1..10000]
+        :"X-Bz-Content-Sha1" => nil
+      }
+
+      ##
+      # @param {chunk_number} is a value in [0...9999]
+      def initialize(upload_part_auth, io, chunk_number, chunk_size)
+        @upload_url = upload_part_auth.upload_url
+        @body_chunk = read_chunk(io, chunk_number, chunk_size)
+        @headers = REQUIRED_HEADERS.dup
+
+        part_number = chunk_number + 1
+        @headers[:"Authorization"] = upload_part_auth.authorization_token
+        @headers[:"Content-Length"] = @body_chunk.size
+        @headers[:"X-Bz-Part-Number"] = part_number
+        @headers[:"X-Bz-Content-Sha1"] = Digest::SHA1.hexdigest @body_chunk
+      end
+
+      def build_request(session)
+        session.create_post @upload_url, @body_chunk, @headers
+      end
+
+      private
+      def read_chunk(io, chunk_number, chunk_size)
+        unless io.is_a?(IO)
+          raise 'only IO type is supported for upload_part'
+        end
+
+        byte = chunk_number * chunk_size
+        # https://ruby-doc.org/core-2.5/IO.html#method-i-seek
+        io.seek byte, IO::SEEK_SET
+        io.read(chunk_size)
+      end
+    end
+  end
+end
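A small worked example of how UploadPartRequest maps a zero-based chunk index onto B2's one-based part numbers, byte offsets and headers (values illustrative):

chunk_size   = 100_000_000
chunk_number = 2                           # the third chunk of the file
part_number  = chunk_number + 1            # => 3, sent as X-Bz-Part-Number
offset       = chunk_number * chunk_size   # => 200_000_000, passed to io.seek
# Content-Length is the byte count actually read for this chunk, and
# X-Bz-Content-Sha1 is Digest::SHA1.hexdigest of those same bytes.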
data/lib/bkblz/version.rb
CHANGED
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: bkblz
 version: !ruby/object:Gem::Version
-  version: 0.1.12
+  version: 0.1.13
 platform: ruby
 authors:
 - Erick Johnson
 autorequire:
 bindir: bin
 cert_chain: []
-date:
+date: 2019-01-09 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: thor

@@ -105,7 +105,9 @@ files:
 - lib/bkblz/v1/delete_file_version.rb
 - lib/bkblz/v1/download_file.rb
 - lib/bkblz/v1/error_response.rb
+- lib/bkblz/v1/finish_large_file.rb
 - lib/bkblz/v1/get_file_info.rb
+- lib/bkblz/v1/get_upload_part_url.rb
 - lib/bkblz/v1/get_upload_url.rb
 - lib/bkblz/v1/list_buckets.rb
 - lib/bkblz/v1/list_file_names.rb

@@ -115,7 +117,9 @@ files:
 - lib/bkblz/v1/request.rb
 - lib/bkblz/v1/response.rb
 - lib/bkblz/v1/session.rb
+- lib/bkblz/v1/start_large_file.rb
 - lib/bkblz/v1/upload_file.rb
+- lib/bkblz/v1/upload_part.rb
 - lib/bkblz/version.rb
 homepage: https://github.com/erickj/bkblz
 licenses:

@@ -137,7 +141,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
     version: '0'
 requirements: []
 rubyforge_project:
-rubygems_version: 2.
+rubygems_version: 2.7.7
 signing_key:
 specification_version: 4
 summary: Bkblz GEM for the Backblaze B2 API. https://www.backblaze.com/b2/docs/