rest-ftp-daemon-remote-s3 0.0.1

Sign up to get free protection for your applications and to get access to all the features.
checksums.yaml ADDED
@@ -0,0 +1,7 @@
1
+ ---
2
+ SHA1:
3
+ metadata.gz: 31cf8655302c112d563c71b02e8ec94c2a61b801
4
+ data.tar.gz: 055ed157321fc8b564b0daa2c4db4b36740dd508
5
+ SHA512:
6
+ metadata.gz: 2abd8ecb845d5725357cf2dc637474dfe6f2fccaa4112d3dda8b0886567ea478f87033f8e221c6047f0046e76f07302fc0ca20928583864e9a6a56baa0cc4c5e
7
+ data.tar.gz: 4e0ccea3de1e1f5e9f675395d69335efa3c09fd472e6af45897374e7fe710818f42d32f04bcb029acf9c7882db999f5d98a712d30ccfd5a336b1d25bd609e6b5
data/Gemfile ADDED
@@ -0,0 +1,3 @@
# Use the secure (TLS) rubygems endpoint: plain http is vulnerable to
# man-in-the-middle tampering of downloaded gems, and Bundler warns on it.
source "https://rubygems.org"

# All dependencies are declared in the gemspec
gemspec
data/README.md ADDED
@@ -0,0 +1 @@
1
+ # rest-ftp-daemon-remote-s3
data/Rakefile ADDED
@@ -0,0 +1,18 @@
# encoding: utf-8

# Gem packaging tasks (build / install / release)
require "rubygems"
require "bundler/gem_tasks"

# Spec suite: `rake spec`
require "rspec/core/rake_task"
RSpec::Core::RakeTask.new(:spec)

# Linting: `rake rubocop` — offenses are reported but never fail the build
require "rubocop/rake_task"
RuboCop::RakeTask.new(:rubocop) { |t| t.fail_on_error = false }

# Pull in any project-specific rake tasks
Dir.glob('lib/tasks/*.rake').each do |task_file|
  import task_file
end

# Default target: tests first, then the linter
desc "Run all tests"
task default: [:spec, :rubocop]
@@ -0,0 +1,175 @@
# Dependencies
require 'aws-sdk-resources'

# Register the "s3" scheme with Ruby's URI parser so that parsing an
# s3:// target yields a URI::S3 instance this plugin can claim.
module URI
  class S3 < Generic; end
  @@schemes["S3"] = S3
end

# Handle S3 transfers for Remote class
module RestFtpDaemon::Remote
  class RemoteS3 < RemoteBase

    # Multipart upload limits imposed by S3: every part except the last
    # must be at least 5 MB, and one upload may hold at most 10_000 parts.
    S3_MIN_PART = 5*MB
    S3_MAX_COUNT = 10_000

    # Class options
    attr_reader :client
    attr_reader :target

    # URI schemes handled by this plugin
    def self.handles
      [URI::S3]
    end

    def initialize target, job, config
      super
    end

    # Build the S3 client from the target's region and credentials.
    # Raises RemoteConnectError wrapping any error met while building it.
    def connect
      super

      # Connect remote server
      @client = Aws::S3::Client.new(
        region: @target.aws_region,
        credentials: Aws::Credentials.new(@target.aws_id, @target.aws_secret),
        http_wire_trace: debug_enabled
      )

    rescue StandardError => exception
      # FIX: was `rescue Exception`, which also swallowed SignalException,
      # SystemExit and NoMemoryError; AWS SDK errors descend from StandardError.
      raise RemoteConnectError, "#{exception.class}: #{exception.message}"
    end

    # Return the remote object's size in bytes, or false when the key
    # does not exist.
    def size_if_exists target
      log_debug "size_if_exists rel[#{target.path_rel}]"
      # FIX: use head_object instead of get_object. get_object downloaded
      # the whole object body just to read content_length, and raises
      # Aws::S3::Errors::NoSuchKey on a missing key — which the NotFound
      # rescue below never caught. head_object fetches metadata only and
      # raises Aws::S3::Errors::NotFound, matching the rescue.
      object = @client.head_object(bucket: target.aws_bucket, key: target.path_rel)
    rescue Aws::S3::Errors::NotFound
      return false
    else
      return object.content_length
    end

    # Upload a local file to the target bucket/key, reporting progress
    # through the callback. Files of S3_MIN_PART bytes or more go through
    # the multipart API, smaller ones through a single put_object.
    def push source, target, &callback
      # Push init
      raise RestFtpDaemon::AssertionFailed, "push/client" if @client.nil?

      # Do the transfer, passing the file to the best method
      File.open(source.path_abs, 'r', encoding: 'BINARY') do |file|
        if file.size >= S3_MIN_PART
          upload_multipart file, target.aws_bucket, target.path_rel, target.name, &callback
        else
          upload_onefile file, target.aws_bucket, target.path_rel, target.name, &callback
        end
      end
    end

    # Server-side "rename": S3 has no native move, so copy the object to
    # the new key, then delete the original.
    # NOTE(review): the destination bucket is source.aws_bucket, so a
    # cross-bucket move would land in the source bucket — confirm intended.
    def move source, target
      @client.copy_object(bucket: source.aws_bucket, key: target.path_rel, copy_source: "#{source.aws_bucket}/#{source.path_rel}")
      @client.delete_object(bucket: source.aws_bucket, key: source.path_rel)
    end

    def connected?
      !@client.nil?
    end

  private

    # Single-request upload for files below the multipart threshold.
    def upload_onefile file, s3_bucket, s3_path, s3_name, &callback
      log_debug "push: put_object", {
        s3_bucket: s3_bucket,
        s3_path: s3_path,
      }
      @client.put_object(bucket: s3_bucket, key: s3_path, body: file)
    end

    # Multipart upload: declare the upload, send the file part by part
    # (yielding each part's size to the progress callback), then complete
    # it. On any failure the upload is aborted so S3 does not keep
    # storing (and billing) the already-sent parts.
    def upload_multipart file, s3_bucket, s3_path, s3_name, &callback
      # Init
      current_part = 1

      # Compute parameters
      file_size = file.size
      parts_size = compute_parts_size(file_size)
      parts_count = (file_size.to_f / parts_size).ceil

      # Prepare basic opts
      options = {
        bucket: s3_bucket,
        key: s3_path,
      }

      # Declare multipart upload
      mpu_create_response = @client.create_multipart_upload(options)
      options[:upload_id] = mpu_create_response.upload_id
      log_debug "push: create_multipart_upload", {
        s3_bucket: s3_bucket,
        s3_path: s3_path,
        upload_id: options[:upload_id],
        file_size: format_bytes(file_size, "B"),
        parts_size: format_bytes(parts_size, "B"),
        parts_count: parts_count
      }

      begin
        # Upload each part
        file.each_part(parts_size) do |part|
          # Prepare part upload
          opts = options.merge({
            body: part,
            part_number: current_part,
          })
          part_size = part.bytesize
          log_debug "upload_part [#{current_part}/#{parts_count}] part_size[#{part_size}]"

          # Push this over there
          @client.upload_part(opts)

          # Send progress info upwards
          yield part_size, s3_name

          # Increment part number
          current_part += 1
        end

        # Retrieve parts and complete upload
        parts_resp = @client.list_parts(options)
        those_parts = parts_resp.parts.map do |part|
          { part_number: part.part_number, etag: part.etag }
        end
        opts = options.merge({
          multipart_upload: {
            parts: those_parts
          }
        })
        log_debug "complete_multipart_upload"
        @client.complete_multipart_upload(opts)

      rescue StandardError
        # FIX: abort the upload on failure — otherwise uploaded parts stay
        # stored (and billed) on S3 until aborted manually.
        begin
          @client.abort_multipart_upload(options)
        rescue StandardError => abort_error
          log_debug "abort_multipart_upload failed: #{abort_error.message}"
        end
        raise
      end
    end

    # Choose a part size: the minimal 5 MB, unless that would exceed the
    # 10_000-part cap, in which case parts grow just enough to fit.
    def compute_parts_size filesize
      # Initial part size is minimal
      partsize_mini = S3_MIN_PART

      # Other partsize if too many blocks
      partsize_bigf = (filesize.to_f / S3_MAX_COUNT).ceil

      # Decide
      return [partsize_mini, partsize_bigf].max
    end

    # Whether to trace HTTP exchanges, from the :debug_s3 config flag
    def debug_enabled
      @config[:debug_s3]
    end

  end
end
@@ -0,0 +1,29 @@
# coding: utf-8
Gem::Specification.new do |s|

  # Identity and release info (name must be set before homepage below)
  s.name    = "rest-ftp-daemon-remote-s3"
  s.version = "0.0.1"
  s.date    = Time.now.strftime("%Y-%m-%d")

  # Authorship and description
  s.authors     = ["Bruno MEDICI"]
  s.email       = "rftpd-project@bmconseil.com"
  s.description = "rest-ftp-daemon plugin: S3"
  s.summary     = "rest-ftp-daemon plugin: transfer files from/to Amazon S3"
  s.homepage    = "http://github.com/bmedici/#{s.name}"
  s.licenses    = ["MIT"]

  # Packaged files: everything tracked by git; executables live under bin/
  s.files         = `git ls-files -z`.split("\x0")
  s.executables   = s.files.grep(%r{^bin/}) { |f| File.basename(f) }
  s.require_paths = ["lib"]
  s.required_ruby_version = ">= 2.3"

  # Runtime dependencies
  s.add_runtime_dependency "aws-sdk-resources", '~> 2.6'

  # Development dependencies
  s.add_development_dependency "bundler", "~> 1.6"
  s.add_development_dependency "rake"
end
metadata ADDED
@@ -0,0 +1,90 @@
1
+ --- !ruby/object:Gem::Specification
2
+ name: rest-ftp-daemon-remote-s3
3
+ version: !ruby/object:Gem::Version
4
+ version: 0.0.1
5
+ platform: ruby
6
+ authors:
7
+ - Bruno MEDICI
8
+ autorequire:
9
+ bindir: bin
10
+ cert_chain: []
11
+ date: 2017-07-26 00:00:00.000000000 Z
12
+ dependencies:
13
+ - !ruby/object:Gem::Dependency
14
+ name: aws-sdk-resources
15
+ requirement: !ruby/object:Gem::Requirement
16
+ requirements:
17
+ - - "~>"
18
+ - !ruby/object:Gem::Version
19
+ version: '2.6'
20
+ type: :runtime
21
+ prerelease: false
22
+ version_requirements: !ruby/object:Gem::Requirement
23
+ requirements:
24
+ - - "~>"
25
+ - !ruby/object:Gem::Version
26
+ version: '2.6'
27
+ - !ruby/object:Gem::Dependency
28
+ name: bundler
29
+ requirement: !ruby/object:Gem::Requirement
30
+ requirements:
31
+ - - "~>"
32
+ - !ruby/object:Gem::Version
33
+ version: '1.6'
34
+ type: :development
35
+ prerelease: false
36
+ version_requirements: !ruby/object:Gem::Requirement
37
+ requirements:
38
+ - - "~>"
39
+ - !ruby/object:Gem::Version
40
+ version: '1.6'
41
+ - !ruby/object:Gem::Dependency
42
+ name: rake
43
+ requirement: !ruby/object:Gem::Requirement
44
+ requirements:
45
+ - - ">="
46
+ - !ruby/object:Gem::Version
47
+ version: '0'
48
+ type: :development
49
+ prerelease: false
50
+ version_requirements: !ruby/object:Gem::Requirement
51
+ requirements:
52
+ - - ">="
53
+ - !ruby/object:Gem::Version
54
+ version: '0'
55
+ description: 'rest-ftp-daemon plugin: S3'
56
+ email: rftpd-project@bmconseil.com
57
+ executables: []
58
+ extensions: []
59
+ extra_rdoc_files: []
60
+ files:
61
+ - Gemfile
62
+ - README.md
63
+ - Rakefile
64
+ - lib/plugins/rest-ftp-daemon/remote/remote_s3.rb
65
+ - rest-ftp-daemon-remote-s3.gemspec
66
+ homepage: http://github.com/bmedici/rest-ftp-daemon-remote-s3
67
+ licenses:
68
+ - MIT
69
+ metadata: {}
70
+ post_install_message:
71
+ rdoc_options: []
72
+ require_paths:
73
+ - lib
74
+ required_ruby_version: !ruby/object:Gem::Requirement
75
+ requirements:
76
+ - - ">="
77
+ - !ruby/object:Gem::Version
78
+ version: '2.3'
79
+ required_rubygems_version: !ruby/object:Gem::Requirement
80
+ requirements:
81
+ - - ">="
82
+ - !ruby/object:Gem::Version
83
+ version: '0'
84
+ requirements: []
85
+ rubyforge_project:
86
+ rubygems_version: 2.5.1
87
+ signing_key:
88
+ specification_version: 4
89
+ summary: 'rest-ftp-daemon plugin: transfer files from/to Amazon S3'
90
+ test_files: []