iostreams 1.2.1 → 1.6.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/README.md +19 -4
- data/lib/io_streams/builder.rb +27 -10
- data/lib/io_streams/bzip2/reader.rb +3 -3
- data/lib/io_streams/bzip2/writer.rb +3 -3
- data/lib/io_streams/deprecated.rb +1 -1
- data/lib/io_streams/encode/reader.rb +1 -3
- data/lib/io_streams/encode/writer.rb +1 -1
- data/lib/io_streams/errors.rb +22 -0
- data/lib/io_streams/io_streams.rb +1 -5
- data/lib/io_streams/line/reader.rb +28 -16
- data/lib/io_streams/path.rb +3 -1
- data/lib/io_streams/paths/file.rb +4 -4
- data/lib/io_streams/paths/http.rb +6 -3
- data/lib/io_streams/paths/s3.rb +30 -8
- data/lib/io_streams/paths/sftp.rb +34 -13
- data/lib/io_streams/pgp.rb +84 -71
- data/lib/io_streams/stream.rb +78 -12
- data/lib/io_streams/tabular.rb +28 -27
- data/lib/io_streams/tabular/header.rb +14 -12
- data/lib/io_streams/tabular/parser/csv.rb +4 -2
- data/lib/io_streams/tabular/parser/fixed.rb +166 -26
- data/lib/io_streams/tabular/utility/csv_row.rb +1 -4
- data/lib/io_streams/utils.rb +4 -4
- data/lib/io_streams/version.rb +1 -1
- data/lib/io_streams/zip/reader.rb +1 -1
- data/test/builder_test.rb +29 -0
- data/test/bzip2_writer_test.rb +6 -4
- data/test/deprecated_test.rb +2 -0
- data/test/files/test.psv +4 -0
- data/test/files/unclosed_quote_large_test.csv +1658 -0
- data/test/files/unclosed_quote_test2.csv +3 -0
- data/test/io_streams_test.rb +2 -2
- data/test/line_reader_test.rb +30 -4
- data/test/paths/file_test.rb +1 -1
- data/test/paths/s3_test.rb +3 -3
- data/test/paths/sftp_test.rb +4 -4
- data/test/pgp_test.rb +54 -4
- data/test/pgp_writer_test.rb +3 -3
- data/test/stream_test.rb +174 -8
- data/test/tabular_test.rb +100 -40
- data/test/test_helper.rb +1 -1
- metadata +47 -42
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: a6dd68508835099ef4c9de7f81a9527927d3225138f0bf5ecd00c44194e11858
+  data.tar.gz: 28535e95ca83a4cf0c522de4cd48889a489b4fcefdc8feb9a7bfe0b70124fd7a
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 8e4d38ef41234f62fdcfde1ae3a96fe59203dcfc38562843106b081c7292efffdda36b88707674059a24b0fa2996d4c1dbe0627694e80a9b12d23d47195a53d3
+  data.tar.gz: 23e16854c305542ee0976f570444d7b99cadcb6ec460d83bebc73a65cba9c28e3beba485c9943797b174b80bdd783c216d89d539e8aecdbda62e1c31f017efce
data/README.md
CHANGED
@@ -1,7 +1,8 @@
-#
-[](https://rubygems.org/gems/iostreams) [](https://rubygems.org/gems/iostreams) [](https://rubygems.org/gems/iostreams) [](http://opensource.org/licenses/Apache-2.0) 
 
-
+IOStreams is an incredibly powerful streaming library that makes changes to file formats, compression, encryption,
+or storage mechanism transparent to the application.
 
 ## Project Status
 
@@ -9,7 +10,21 @@ Production Ready, heavily used in production environments, many as part of Rocke
 
 ## Documentation
 
-[
+Start with the [IOStreams tutorial](https://iostreams.rocketjob.io/tutorial) to get a great introduction to IOStreams.
+
+Next, checkout the remaining [IOStreams documentation](https://iostreams.rocketjob.io/)
+
+## Upgrading to v1.6
+
+The old, deprecated api's are no longer loaded by default with v1.6. To add back the deprecated api support, add
+the following line to your code:
+
+~~~ruby
+IOStreams.include(IOStreams::Deprecated)
+~~~
+
+It is important to move any of the old deprecated apis over to the new api, since they will be removed in a future
+release.
 
 ## Versioning
 
data/lib/io_streams/builder.rb
CHANGED
@@ -1,13 +1,15 @@
 module IOStreams
   # Build the streams that need to be applied to a path druing reading or writing.
   class Builder
-    attr_accessor :file_name
+    attr_accessor :file_name, :format_options
     attr_reader :streams, :options
 
     def initialize(file_name = nil)
-      @file_name
-      @streams
-      @options
+      @file_name = file_name
+      @streams = nil
+      @options = nil
+      @format = nil
+      @format_option = nil
     end
 
     # Supply an option that is only applied once the file name extensions have been parsed.
@@ -20,7 +22,7 @@ module IOStreams
       raise(ArgumentError, "Cannot call #option unless the `file_name` was already set}") unless file_name
 
       @options ||= {}
-      if opts = @options[stream]
+      if (opts = @options[stream])
        opts.merge!(options)
      else
        @options[stream] = options.dup
@@ -40,7 +42,7 @@ module IOStreams
      raise(ArgumentError, "Invalid stream: #{stream.inspect}") unless IOStreams.extensions.include?(stream)
 
      @streams ||= {}
-      if opts = @streams[stream]
+      if (opts = @streams[stream])
        opts.merge!(options)
      else
        @streams[stream] = options.dup
@@ -88,10 +90,23 @@ module IOStreams
      built_streams.freeze
    end
 
+    # Returns the tabular format if set, otherwise tries to autodetect the format if the file_name has been set
+    # Returns [nil] if no format is set, or if it cannot be determined from the file_name
+    def format
+      @format ||= file_name ? Tabular.format_from_file_name(file_name) : nil
+    end
+
+    def format=(format)
+      raise(ArgumentError, "Invalid format: #{format.inspect}") unless format.nil? || IOStreams::Tabular.registered_formats.include?(format)
+
+      @format = format
+    end
+
    private
 
    def class_for_stream(type, stream)
-      ext = IOStreams.extensions[stream.nil? ? nil : stream.to_sym] ||
+      ext = IOStreams.extensions[stream.nil? ? nil : stream.to_sym] ||
+        raise(ArgumentError, "Unknown Stream type: #{stream.inspect}")
      ext.send("#{type}_class") || raise(ArgumentError, "No #{type} registered for Stream type: #{stream.inspect}")
    end
 
@@ -99,7 +114,7 @@ module IOStreams
    def parse_extensions
      parts = ::File.basename(file_name).split(".")
      extensions = []
-      while extension = parts.pop
+      while (extension = parts.pop)
        sym = extension.downcase.to_sym
        break unless IOStreams.extensions[sym]
 
@@ -116,10 +131,12 @@ module IOStreams
        block.call(io_stream)
      elsif pipeline.size == 1
        stream, opts = pipeline.first
-        class_for_stream(type, stream).open(io_stream, opts, &block)
+        class_for_stream(type, stream).open(io_stream, **opts, &block)
      else
        # Daisy chain multiple streams together
-        last = pipeline.keys.inject(block)
+        last = pipeline.keys.inject(block) do |inner, stream_sym|
+          ->(io) { class_for_stream(type, stream_sym).open(io, **pipeline[stream_sym], &inner) }
+        end
        last.call(io_stream)
      end
    end
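
Read as a sketch rather than IOStreams internals: the `inject` above builds one lambda per stream, each of which opens its stream and hands the opened IO to the lambda built before it, with the caller's block innermost. A runnable toy version of that composition pattern (the labels and `open_stream` here are made up for illustration, not IOStreams APIs):

~~~ruby
# Standalone illustration of the lambda daisy-chain in Builder#execute.
# "open_stream" stands in for class_for_stream(type, stream).open.
pipeline = {gz: "gz", enc: "enc"} # hypothetical stream labels

open_stream = ->(io, label, &blk) { blk.call("#{label}(#{io})") }
block       = ->(io) { puts "innermost block sees: #{io}" }

last = pipeline.keys.inject(block) do |inner, stream_sym|
  ->(io) { open_stream.call(io, pipeline[stream_sym], &inner) }
end

# The lambda built for the last key runs first, so it wraps the raw IO on the
# outside and the caller's block ends up innermost.
last.call("raw_io") # => innermost block sees: gz(enc(raw_io))
~~~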
data/lib/io_streams/bzip2/reader.rb
CHANGED
@@ -2,11 +2,11 @@ module IOStreams
   module Bzip2
     class Reader < IOStreams::Reader
       # Read from a Bzip2 stream, decompressing the contents as it is read
-      def self.stream(input_stream, **
-        Utils.load_soft_dependency("
+      def self.stream(input_stream, **args)
+        Utils.load_soft_dependency("bzip2-ffi", "Bzip2", "bzip2/ffi") unless defined?(::Bzip2::FFI)
 
        begin
-          io =
+          io = ::Bzip2::FFI::Reader.new(input_stream, args)
          yield io
        ensure
          io&.close
data/lib/io_streams/bzip2/writer.rb
CHANGED
@@ -2,11 +2,11 @@ module IOStreams
   module Bzip2
     class Writer < IOStreams::Writer
       # Write to a stream, compressing with Bzip2
-      def self.stream(input_stream, original_file_name: nil, **
-        Utils.load_soft_dependency("
+      def self.stream(input_stream, original_file_name: nil, **args)
+        Utils.load_soft_dependency("bzip2-ffi", "Bzip2", "bzip2/ffi") unless defined?(::Bzip2::FFI)
 
        begin
-          io =
+          io = ::Bzip2::FFI::Writer.new(input_stream, args)
          yield io
        ensure
          io&.close
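
Both Bzip2 classes now load the `bzip2-ffi` gem lazily and pass keyword arguments straight through to `Bzip2::FFI`. A minimal usage sketch, assuming the optional `bzip2-ffi` gem is installed and using a hypothetical file name:

~~~ruby
require "iostreams"

path = IOStreams.path("sample.txt.bz2") # hypothetical file; ".bz2" selects the Bzip2 stream
path.write("Hello Bzip2")               # compressed via Bzip2::FFI::Writer on the way out
puts path.read                          # decompressed via Bzip2::FFI::Reader on the way back
~~~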
data/lib/io_streams/deprecated.rb
CHANGED
@@ -206,7 +206,7 @@ module IOStreams
      elsif streams.is_a?(Array)
        streams.each { |stream| apply_old_style_streams(path, stream) }
      elsif streams.is_a?(Hash)
-        streams.each_pair { |stream, options| path.stream(stream, options) }
+        streams.each_pair { |stream, options| path.stream(stream, **options) }
      else
        raise ArgumentError, "Invalid old style stream supplied: #{params.inspect}"
      end
data/lib/io_streams/encode/reader.rb
CHANGED
@@ -73,13 +73,11 @@ module IOStreams
      # EOF reached?
      return unless block
 
-      block = block.encode(@encoding,
+      block = block.encode(@encoding, **@encoding_options) unless block.encoding == @encoding
      block = @cleaner.call(block, @replace) if @cleaner
      block
    end
 
-    private
-
    def self.extract_cleaner(cleaner)
      return if cleaner.nil?
 
data/lib/io_streams/encode/writer.rb
CHANGED
@@ -66,7 +66,7 @@ module IOStreams
      return 0 if data.nil?
 
      data = data.to_s
-      block = data.encoding == @encoding ? data : data.encode(@encoding,
+      block = data.encoding == @encoding ? data : data.encode(@encoding, **@encoding_options)
      block = @cleaner.call(block, @replace) if @cleaner
      @output_stream.write(block)
    end
data/lib/io_streams/errors.rb
CHANGED
@@ -9,6 +9,9 @@ module IOStreams
     class MissingHeader < Error
     end
 
+    class UnknownFormat < Error
+    end
+
     class TypeMismatch < Error
     end
 
@@ -18,5 +21,24 @@ module IOStreams
     # When the specified delimiter is not found in the supplied stream / file
     class DelimiterNotFound < Error
     end
+
+    # Fixed length line has the wrong length
+    class InvalidLineLength < Error
+    end
+
+    class ValueTooLong < Error
+    end
+
+    class MalformedDataError < RuntimeError
+      attr_reader :line_number
+
+      def initialize(message, line_number)
+        @line_number = line_number
+        super("#{message} on line #{line_number}.")
+      end
+    end
+
+    class InvalidLayout < Error
+    end
   end
 end
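
The new error classes all live under `IOStreams::Errors`, and `MalformedDataError` carries the line number it was raised for. A hedged sketch of how calling code might surface it (the file name is hypothetical):

~~~ruby
require "iostreams"

begin
  IOStreams.path("unclosed_quote_test2.csv").each(:line) { |line| puts line }
rescue IOStreams::Errors::MalformedDataError => e
  # The message already ends with "on line <n>."; line_number is also available directly.
  warn "Bad input at line #{e.line_number}: #{e.message}"
end
~~~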
data/lib/io_streams/io_streams.rb
CHANGED
@@ -13,8 +13,6 @@ require "uri"
 #   .zip.enc [ :zip, :enc ]
 #   .gz.enc  [ :gz, :enc ]
 module IOStreams
-  include Deprecated
-
   # Returns [Path] instance for the supplied complete path with optional scheme.
   #
   # Example:
@@ -58,7 +56,7 @@ module IOStreams
   end
 
   # For an existing IO Stream
-  #   IOStreams.stream(io).file_name('blah.zip').encoding('BINARY').
+  #   IOStreams.stream(io).file_name('blah.zip').encoding('BINARY').read
   #   IOStreams.stream(io).file_name('blah.zip').encoding('BINARY').each(:line){ ... }
   #   IOStreams.stream(io).file_name('blah.csv.zip').each(:line) { ... }
   #   IOStreams.stream(io).stream(:zip).stream(:pgp, passphrase: 'receiver_passphrase').read
@@ -298,8 +296,6 @@ module IOStreams
     @schemes[scheme_name.nil? ? nil : scheme_name.to_sym] || raise(ArgumentError, "Unknown Scheme type: #{scheme_name.inspect}")
   end
 
-  private
-
   Extension = Struct.new(:reader_class, :writer_class)
 
   # Hold root paths
data/lib/io_streams/line/reader.rb
CHANGED
@@ -38,12 +38,12 @@ module IOStreams
      #   Size of blocks to read from the input stream at a time.
      #   Default: 65536 ( 64K )
      #
-      #
-      #
-      #
-      #
-      #
-      #
+      #   embedded_within: [String]
+      #     Supports CSV files where a line may contain an embedded newline.
+      #     For CSV files set `embedded_within: '"'`
+      #
+      #   Note:
+      #   * When using a line reader and the file_name ends with ".csv" then embedded_within is automatically set to `"`
      def initialize(input_stream, delimiter: nil, buffer_size: 65_536, embedded_within: nil, original_file_name: nil)
        super(input_stream)
 
@@ -63,11 +63,11 @@ module IOStreams
        # Auto-detect windows/linux line endings if not supplied. \n or \r\n
        @delimiter ||= auto_detect_line_endings
 
-
-
-
-
-
+        return unless @buffer
+
+        # Change the delimiters encoding to match that of the input stream
+        @delimiter = @delimiter.encode(@buffer.encoding)
+        @delimiter_size = @delimiter.size
      end
 
      # Iterate over every line in the file/stream passing each line to supplied block in turn.
@@ -86,17 +86,29 @@ module IOStreams
        line_count
      end
 
-      # Reads each line per the
-      #
+      # Reads each line per the `delimeter`.
+      # Accounts for lines that contain the `delimiter` when the `delimeter` is within the `embedded_within` delimiter.
+      # For Example, CSV files can contain newlines embedded within double quotes.
      def readline
        line = _readline
        if line && @embedded_within
          initial_line_number = @line_number
          while line.count(@embedded_within).odd?
-
-
+            if eof? || line.length > @buffer_size * 10
+              raise(Errors::MalformedDataError.new(
+                "Unbalanced delimited field, delimiter: #{@embedded_within}",
+                initial_line_number
+              ))
+            end
            line << @delimiter
-
+            next_line = _readline
+            if next_line.nil?
+              raise(Errors::MalformedDataError.new(
+                "Unbalanced delimited field, delimiter: #{@embedded_within}",
+                initial_line_number
+              ))
+            end
+            line << next_line
          end
        end
        line
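
With the changes above, `embedded_within` lets the line reader stitch physical lines together until the quote count balances, and it now fails fast with `MalformedDataError` instead of scanning to end of file. A usage sketch, assuming the option is passed through from the stream API to the line reader and using a hypothetical file name (for names ending in `.csv` the `"` delimiter is applied automatically per the note above):

~~~ruby
require "iostreams"

# Handle fields that contain embedded newlines inside double quotes.
IOStreams.path("export.txt")
         .each(:line, embedded_within: '"') { |line| puts line }
~~~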
data/lib/io_streams/path.rb
CHANGED
@@ -82,6 +82,7 @@ module IOStreams
     end
 
     # Cleanup an incomplete write to the target "file" if the copy fails.
+    # rubocop:disable Lint/SuppressedException
     def copy_from(source, **args)
       super(source, **args)
     rescue StandardError => e
@@ -91,6 +92,7 @@ module IOStreams
       end
       raise(e)
     end
+    # rubocop:enable Lint/SuppressedException
 
     # Moves the file by copying it to the new path and then deleting the current path.
     # Returns [IOStreams::Path] the target path.
@@ -151,7 +153,7 @@ module IOStreams
     # Returns [true|false] whether the file is compressed based on its file extensions.
     def compressed?
       # TODO: Look at streams?
-      !(path =~ /\.(zip|gz|gzip|
+      !(path =~ /\.(zip|gz|gzip|xlsx|xlsm|bz2)\z/i).nil?
     end
 
     # Returns [true|false] whether the file is encrypted based on its file extensions.
data/lib/io_streams/paths/file.rb
CHANGED
@@ -15,16 +15,16 @@ module IOStreams
      # Examples:
      #
      # # Case Insensitive file name lookup:
-      # IOStreams
+      # IOStreams.path("ruby").glob("r*.md") { |name| puts name }
      #
      # # Case Sensitive file name lookup:
-      # IOStreams
+      # IOStreams.path("ruby").each("R*.md", case_sensitive: true) { |name| puts name }
      #
      # # Also return the names of directories found during the search:
-      # IOStreams
+      # IOStreams.path("ruby").each("R*.md", directories: true) { |name| puts name }
      #
      # # Case Insensitive recursive file name lookup:
-      # IOStreams
+      # IOStreams.path("ruby").glob("**/*.md") { |name| puts name }
      #
      # Parameters:
      #   pattern [String]
data/lib/io_streams/paths/http.rb
CHANGED
@@ -26,16 +26,19 @@ module IOStreams
      #
      # http_redirect_count: [Integer]
      #   Maximum number of http redirects to follow.
-      def initialize(url, username: nil, password: nil, http_redirect_count: 10)
+      def initialize(url, username: nil, password: nil, http_redirect_count: 10, parameters: nil)
        uri = URI.parse(url)
        unless %w[http https].include?(uri.scheme)
-          raise(
+          raise(
+            ArgumentError,
+            "Invalid URL. Required Format: 'http://<host_name>/<file_name>', or 'https://<host_name>/<file_name>'"
+          )
        end
 
        @username = username || uri.user
        @password = password || uri.password
        @http_redirect_count = http_redirect_count
-        @url = url
+        @url = parameters ? "#{url}?#{URI.encode_www_form(parameters)}" : url
        super(uri.path)
      end
 
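
The HTTP path now accepts a `parameters` hash that is URL-encoded onto the query string. A sketch with a hypothetical endpoint and parameters:

~~~ruby
require "iostreams"

path = IOStreams::Paths::HTTP.new(
  "https://example.org/reports/daily.csv", # hypothetical URL
  parameters: {date: "2021-03-01", format: "csv"}
)
# Reads from https://example.org/reports/daily.csv?date=2021-03-01&format=csv
path.each(:line) { |line| puts line }
~~~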
data/lib/io_streams/paths/s3.rb
CHANGED
@@ -3,7 +3,10 @@ require "uri"
 module IOStreams
   module Paths
     class S3 < IOStreams::Path
-      attr_reader :bucket_name, :client
+      attr_reader :bucket_name, :client, :options
+
+      # Largest file size supported by the S3 copy object api.
+      S3_COPY_OBJECT_SIZE_LIMIT = 5 * 1024 * 1024 * 1024
 
      # Arguments:
      #
@@ -92,7 +95,7 @@ module IOStreams
      #   encrypting data. This value is used to store the object and then it is
      #   discarded; Amazon does not store the encryption key. The key must be
      #   appropriate for use with the algorithm specified in the
-      #   x-amz-server-side
+      #   x-amz-server-side-encryption-customer-algorithm header.
      #
      # @option params [String] :sse_customer_key_md5
      #   Specifies the 128-bit MD5 digest of the encryption key according to
@@ -179,17 +182,36 @@ module IOStreams
      #
      # Notes:
      # - Can copy across buckets.
+      # - No stream conversions are applied.
      def move_to(target_path)
+        target = copy_to(target_path, convert: false)
+        delete
+        target
+      end
+
+      # Make S3 perform direct copies within S3 itself.
+      def copy_to(target_path, convert: true)
+        return super(target_path) if convert || (size.to_i >= S3_COPY_OBJECT_SIZE_LIMIT)
+
        target = IOStreams.new(target_path)
        return super(target) unless target.is_a?(self.class)
 
        source_name = ::File.join(bucket_name, path)
-
-        client.copy_object(bucket: target.bucket_name, key: target.path, copy_source: source_name)
-        delete
+        client.copy_object(options.merge(bucket: target.bucket_name, key: target.path, copy_source: source_name))
        target
      end
 
+      # Make S3 perform direct copies within S3 itself.
+      def copy_from(source_path, convert: true)
+        return super(source_path) if convert
+
+        source = IOStreams.new(source_path)
+        return super(source) if !source.is_a?(self.class) || (source.size.to_i >= S3_COPY_OBJECT_SIZE_LIMIT)
+
+        source_name = ::File.join(source.bucket_name, source.path)
+        client.copy_object(options.merge(bucket: bucket_name, key: path, copy_source: source_name))
+      end
+
      # S3 logically creates paths when a key is set.
      def mkpath
        self
@@ -220,7 +242,7 @@ module IOStreams
      # Shortcut method if caller has a filename already with no other streams applied:
      def read_file(file_name)
        ::File.open(file_name, "wb") do |file|
-          client.get_object(
+          client.get_object(options.merge(response_target: file, bucket: bucket_name, key: path))
        end
      end
 
@@ -248,10 +270,10 @@ module IOStreams
          # Use multipart file upload
          s3 = Aws::S3::Resource.new(client: client)
          obj = s3.bucket(bucket_name).object(path)
-          obj.upload_file(file_name)
+          obj.upload_file(file_name, options)
        else
          ::File.open(file_name, "rb") do |file|
-            client.put_object(
+            client.put_object(options.merge(bucket: bucket_name, key: path, body: file))
          end
        end
      end
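
With `copy_to`/`copy_from` overridden, an S3-to-S3 copy that needs no stream conversions can stay entirely inside S3 via `copy_object`, falling back to streaming for objects at or above the 5 GB limit above. A sketch, assuming the `aws-sdk-s3` gem and credentials are configured and using hypothetical bucket and key names:

~~~ruby
require "iostreams"

source = IOStreams.path("s3://my-bucket/data/orders.csv") # hypothetical source object
source.copy_to("s3://my-archive-bucket/orders.csv", convert: false)

# move_to now builds on the same direct copy, then deletes the source object.
source.move_to("s3://my-archive-bucket/orders-2021.csv")
~~~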
|