winrm-transport 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/.cane +2 -0
- data/.gitignore +15 -0
- data/.travis.yml +26 -0
- data/CHANGELOG.md +3 -0
- data/Gemfile +13 -0
- data/Guardfile +27 -0
- data/LICENSE.txt +15 -0
- data/README.md +80 -0
- data/Rakefile +49 -0
- data/bin/console +7 -0
- data/bin/setup +7 -0
- data/lib/winrm/transport.rb +28 -0
- data/lib/winrm/transport/command_executor.rb +217 -0
- data/lib/winrm/transport/file_transporter.rb +468 -0
- data/lib/winrm/transport/logging.rb +47 -0
- data/lib/winrm/transport/shell_closer.rb +71 -0
- data/lib/winrm/transport/tmp_zip.rb +184 -0
- data/lib/winrm/transport/version.rb +25 -0
- data/support/check_files.ps1 +46 -0
- data/support/decode_files.ps1 +60 -0
- data/winrm-transport.gemspec +52 -0
- metadata +272 -0
@@ -0,0 +1,468 @@
|
|
1
|
+
# -*- encoding: utf-8 -*-
|
2
|
+
#
|
3
|
+
# Author:: Fletcher (<fnichol@nichol.ca>)
|
4
|
+
#
|
5
|
+
# Copyright (C) 2015, Fletcher Nichol
|
6
|
+
#
|
7
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
8
|
+
# you may not use this file except in compliance with the License.
|
9
|
+
# You may obtain a copy of the License at
|
10
|
+
#
|
11
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
12
|
+
#
|
13
|
+
# Unless required by applicable law or agreed to in writing, software
|
14
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
15
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
16
|
+
# See the License for the specific language governing permissions and
|
17
|
+
# limitations under the License.
|
18
|
+
|
19
|
+
require "benchmark"
|
20
|
+
require "csv"
|
21
|
+
require "digest"
|
22
|
+
require "securerandom"
|
23
|
+
require "stringio"
|
24
|
+
|
25
|
+
require "winrm/transport/logging"
|
26
|
+
require "winrm/transport/tmp_zip"
|
27
|
+
|
28
|
+
module WinRM
|
29
|
+
|
30
|
+
module Transport
|
31
|
+
|
32
|
+
# Wrapped exception for any internally raised WinRM-related errors.
#
# @author Fletcher Nichol <fnichol@nichol.ca>
class FileTransporterFailed < ::WinRM::WinRMError; end
|
36
|
+
|
37
|
+
# Object which can upload one or more files or directories to a remote
|
38
|
+
# host over WinRM using PowerShell scripts and CMD commands. Note that
|
39
|
+
# this form of file transfer is *not* ideal and extremely costly on both
|
40
|
+
# the local and remote sides. Great pains are made to minimize round
|
41
|
+
# trips to the remote host and to minimize the number of PowerShell
|
42
|
+
# sessions being invoked which can be 2 orders of magnitude more
|
43
|
+
# expensive than vanilla CMD commands.
|
44
|
+
#
|
45
|
+
# This object is supported by either a `WinRM::WinRMWebService` or
|
46
|
+
# `CommandExecutor` instance as it depends on the `#run_cmd` and
|
47
|
+
# `#run_powershell_script` API contracts.
|
48
|
+
#
|
49
|
+
# An optional logger can be supplied, assuming it can respond to the
|
50
|
+
# `#debug` and `#debug?` messages.
|
51
|
+
#
|
52
|
+
# @author Fletcher Nichol <fnichol@nichol.ca>
|
53
|
+
# @author Matt Wrock <matt@mattwrock.com>
|
54
|
+
class FileTransporter
|
55
|
+
|
56
|
+
include Logging
|
57
|
+
|
58
|
+
# Creates a FileTransporter given a service object and optional logger.
|
59
|
+
# The service object may be a `WinRM::WinRMWebService` or
|
60
|
+
# `CommandExecutor` instance.
|
61
|
+
#
|
62
|
+
# @param service [WinRM::WinRMWebService,CommandExecutor] a
|
63
|
+
# winrm web service object
|
64
|
+
# @param logger [#debug,#debug?] an optional logger/ui object that
|
65
|
+
# responds to `#debug` and `#debug?` (default: `nil`)
|
66
|
+
def initialize(service, logger = nil, opts = {})
  @service = service
  @logger = logger
  # Allows callers to supply their own unique-id source via
  # opts[:id_generator]; defaults to random UUIDs.
  @id_generator = opts.fetch(:id_generator) { -> { SecureRandom.uuid } }
end
|
71
|
+
|
72
|
+
# Uploads a collection of files and/or directories to the remote host.
|
73
|
+
#
|
74
|
+
# **TODO Notes:**
|
75
|
+
# * options could specify zip mode, zip options, etc.
|
76
|
+
# * maybe option to set tmpfile base dir to override $env:PATH?
|
77
|
+
# * progress yields block like net-scp progress
|
78
|
+
# * final API: def upload(locals, remote, _options = {}, &_progress)
|
79
|
+
#
|
80
|
+
# @param locals [Array<String>,String] one or more local file or
|
81
|
+
# directory paths
|
82
|
+
# @param remote [String] the base destination path on the remote host
|
83
|
+
# @return [Hash] report hash, keyed by the local MD5 digest
|
84
|
+
def upload(locals, remote)
  files = nil

  elapsed = Benchmark.measure do
    files = make_files_hash(Array(locals), remote)

    # Ask the remote host which targets are missing or stale; the report
    # is folded back into the files hash (adds "chk_dirty", etc.).
    report = check_files(files)
    merge_with_report!(files, report)

    # Upload Base64-encoded copies of only the dirty files.
    report = stream_upload_files(files)
    merge_with_report!(files, report)

    # Decode (and extract, for zipped directories) on the remote host.
    report = decode_files(files)
    merge_with_report!(files, report)

    cleanup(files)
  end

  debug {
    "Uploaded #{files.keys.size} items " \
    "in #{duration(elapsed.real)}"
  }

  files
end
|
109
|
+
|
110
|
+
private

# @return [Integer] the maximum number of bytes that can be supplied on
#   a Windows CMD prompt without exceeding the maximum command line
#   length
# @api private
MAX_ENCODED_WRITE = 8000

# @return [String] the Array pack template for Base64 encoding a stream
#   of data
# @api private
BASE64_PACK = "m0".freeze

# @return [#debug,#debug?] the logger
# @api private
attr_reader :logger

# @return [WinRM::WinRMWebService,Winrm::CommandExecutor] a WinRM web
#   service object
# @api private
attr_reader :service
|
131
|
+
|
132
|
+
# Adds an entry to a files Hash (keyed by local MD5 digest) for a
|
133
|
+
# directory. When a directory is added, a temporary Zip file is created
|
134
|
+
# containing the contents of the directory and any file-related data
|
135
|
+
# such as MD5 digest, size, etc. will be referring to the Zip file.
|
136
|
+
#
|
137
|
+
# @param hash [Hash] hash to be mutated
|
138
|
+
# @param dir [String] directory path to be Zipped and added
|
139
|
+
# @param remote [String] path to destination on remote host
|
140
|
+
# @api private
|
141
|
+
# Adds an entry for a directory to a files Hash, keyed by the MD5 digest
# of a temporary Zip archive built from the directory's contents. All
# size/digest metadata refers to the Zip artifact, not the directory.
#
# @param hash [Hash] hash to be mutated
# @param dir [String] directory path to be Zipped and added
# @param remote [String] path to destination on remote host
# @api private
def add_directory_hash!(hash, dir, remote)
  io = TmpZip.new(dir, logger)
  digest = md5sum(io.path)

  hash[digest] = {
    "src"     => dir,
    "src_zip" => io.path.to_s,
    "zip_io"  => io,
    "tmpzip"  => "$env:TEMP\\tmpzip-#{digest}.zip",
    "dst"     => remote,
    "size"    => File.size(io.path)
  }
end
|
154
|
+
|
155
|
+
# Adds an entry to a files Hash (keyed by local MD5 digest) for a file.
|
156
|
+
#
|
157
|
+
# @param hash [Hash] hash to be mutated
|
158
|
+
# @param local [String] file path
|
159
|
+
# @param remote [String] path to destination on remote host
|
160
|
+
# @api private
|
161
|
+
# Adds an entry for a single file to a files Hash, keyed by the file's
# local MD5 digest.
#
# @param hash [Hash] hash to be mutated
# @param local [String] file path
# @param remote [String] path to destination on remote host
# @api private
def add_file_hash!(hash, local, remote)
  entry = {
    "src"  => local,
    "dst"  => "#{remote}\\#{File.basename(local)}",
    "size" => File.size(local)
  }
  hash[md5sum(local)] = entry
end
|
168
|
+
|
169
|
+
# Runs the check_files PowerShell script against a collection of
|
170
|
+
# destination path/MD5 checksum pairs. The PowerShell script returns
|
171
|
+
# its results as a CSV-formatted report which is converted into a Ruby
|
172
|
+
# Hash.
|
173
|
+
#
|
174
|
+
# @param files [Hash] files hash, keyed by the local MD5 digest
|
175
|
+
# @return [Hash] a report hash, keyed by the local MD5 digest
|
176
|
+
# @api private
|
177
|
+
def check_files(files)
  debug { "Running check_files.ps1" }
  # The dst-path => md5 table is shipped to the remote host as a file;
  # the script reads it via the $hash_file variable injected below.
  hash_file = create_remote_hash_file(check_files_ps_hash(files))
  vars = %{$hash_file = "#{hash_file}"\n}

  output = service.run_powershell_script(
    [vars, check_files_script].join("\n")
  )
  parse_response(output)
end
|
187
|
+
|
188
|
+
# Constructs a collection of destination path/MD5 checksum pairs as a
|
189
|
+
# String representation of the contents of a PowerShell Hash Table.
|
190
|
+
#
|
191
|
+
# @param files [Hash] files hash, keyed by the local MD5 digest
|
192
|
+
# @return [String] the inner contents of a PowerShell Hash Table
|
193
|
+
# @api private
|
194
|
+
# Builds the remote-destination => local-MD5 mapping rendered as the
# inner contents of a PowerShell hash table.
#
# @param files [Hash] files hash, keyed by the local MD5 digest
# @return [String] the inner contents of a PowerShell Hash Table
# @api private
def check_files_ps_hash(files)
  pairs = files.each_with_object({}) do |(md5, data), memo|
    # Directories are checked against their remote tmpzip; plain files
    # against their final destination.
    memo[data.fetch("tmpzip", data["dst"])] = md5
  end
  ps_hash(pairs)
end
|
199
|
+
|
200
|
+
# @return [String] the check_files PowerShell script
|
201
|
+
# @api private
|
202
|
+
def check_files_script
  # Memoize the script body. %w (not %W) since no interpolation is used.
  @check_files_script ||= IO.read(File.join(
    File.dirname(__FILE__), %w[.. .. .. support check_files.ps1]
  ))
end
|
207
|
+
|
208
|
+
# Performs any final cleanup on the report Hash and removes any
|
209
|
+
# temporary files/resources used in the upload task.
|
210
|
+
#
|
211
|
+
# @param files [Hash] a files hash
|
212
|
+
# @api private
|
213
|
+
# Deletes the local temporary Zip files created for directory uploads
# and drops their handles from the files Hash.
#
# @param files [Hash] a files hash
# @api private
def cleanup(files)
  files.each do |md5, data|
    next unless data.key?("zip_io")

    data.fetch("zip_io").unlink
    files.fetch(md5).delete("zip_io")
    debug { "Cleaned up src_zip #{data["src_zip"]}" }
  end
end
|
220
|
+
|
221
|
+
# Creates a remote Base64-encoded temporary file containing a
|
222
|
+
# PowerShell hash table.
|
223
|
+
#
|
224
|
+
# @param hash [String] a String representation of a PowerShell hash
|
225
|
+
# table
|
226
|
+
# @return [String] the remote path to the temporary file
|
227
|
+
# @api private
|
228
|
+
def create_remote_hash_file(hash)
  hash_file = "$env:TEMP\\hash-#{@id_generator.call}.txt"
  # Log the hash table contents line by line for troubleshooting.
  hash.lines.each { |line| debug { line.chomp } }
  StringIO.open(hash) { |io| stream_upload(io, hash_file) }
  hash_file
end
|
234
|
+
|
235
|
+
# Runs the decode_files PowerShell script against a collection of
|
236
|
+
# temporary file/destination path pairs. The PowerShell script returns
|
237
|
+
# its results as a CSV-formatted report which is converted into a Ruby
|
238
|
+
# Hash. The script will not be invoked if there are no "dirty" files
|
239
|
+
# present in the incoming files Hash.
|
240
|
+
#
|
241
|
+
# @param files [Hash] files hash, keyed by the local MD5 digest
|
242
|
+
# @return [Hash] a report hash, keyed by the local MD5 digest
|
243
|
+
# @api private
|
244
|
+
def decode_files(files)
  decoded_files = decode_files_ps_hash(files)

  # An empty PowerShell hash table means no file was dirty, so skip the
  # remote round trip entirely.
  if decoded_files == ps_hash(Hash.new)
    debug { "No remote files to decode, skipping" }
    Hash.new
  else
    debug { "Running decode_files.ps1" }
    hash_file = create_remote_hash_file(decoded_files)
    vars = %{$hash_file = "#{hash_file}"\n}

    output = service.run_powershell_script(
      [vars, decode_files_script].join("\n")
    )
    parse_response(output)
  end
end
|
261
|
+
|
262
|
+
# Constructs a collection of temporary file/destination path pairs for
|
263
|
+
# all "dirty" files as a String representation of the contents of a
|
264
|
+
# PowerShell Hash Table. A "dirty" file is one which has the
|
265
|
+
# `"chk_dirty"` option set to `"True"` in the incoming files Hash.
|
266
|
+
#
|
267
|
+
# @param files [Hash] files hash, keyed by the local MD5 digest
|
268
|
+
# @return [String] the inner contents of a PowerShell Hash Table
|
269
|
+
# @api private
|
270
|
+
# Builds the tmpfile => destination mapping for every "dirty" file
# (those with "chk_dirty" set to "True"), rendered as the inner
# contents of a PowerShell hash table.
#
# @param files [Hash] files hash, keyed by the local MD5 digest
# @return [String] the inner contents of a PowerShell Hash Table
# @api private
def decode_files_ps_hash(files)
  dirty = files.each_with_object({}) do |(_md5, data), memo|
    next unless data["chk_dirty"] == "True"

    entry = { "dst" => data["dst"] }
    # Directories also carry the remote Zip path so it can be extracted.
    entry["tmpzip"] = data["tmpzip"] if data["tmpzip"]
    memo[data["tmpfile"]] = entry
  end

  ps_hash(dirty)
end
|
280
|
+
|
281
|
+
# @return [String] the decode_files PowerShell script
|
282
|
+
# @api private
|
283
|
+
def decode_files_script
  # Memoize the script body. %w (not %W) since no interpolation is used.
  @decode_files_script ||= IO.read(File.join(
    File.dirname(__FILE__), %w[.. .. .. support decode_files.ps1]
  ))
end
|
288
|
+
|
289
|
+
# Returns a formatted string representing a duration in seconds.
|
290
|
+
#
|
291
|
+
# @param total [Integer] the total number of seconds
|
292
|
+
# @return [String] a formatted string of the form (XmYY.00s)
|
293
|
+
# Formats a duration in seconds as a "(XmYY.00s)" string.
#
# @param total [Integer] the total number of seconds
# @return [String] a formatted string of the form (XmYY.00s)
def duration(total)
  secs = total.nil? ? 0 : total
  mins = (secs / 60).to_i
  format("(%dm%.2fs)", mins, secs - (mins * 60))
end
|
299
|
+
|
300
|
+
# Contructs a Hash of files or directories, keyed by the local MD5
|
301
|
+
# digest. Each file entry has a source and destination set, at a
|
302
|
+
# minimum.
|
303
|
+
#
|
304
|
+
# @param locals [Array<String>] a collection of local files or
|
305
|
+
# directories
|
306
|
+
# @param remote [String] the base destination path on the remote host
|
307
|
+
# @return [Hash] files hash, keyed by the local MD5 digest
|
308
|
+
# @api private
|
309
|
+
# Builds a Hash of files/directories keyed by local MD5 digest; each
# entry carries at least a source and destination.
#
# @param locals [Array<String>] a collection of local files or
#   directories
# @param remote [String] the base destination path on the remote host
# @return [Hash] files hash, keyed by the local MD5 digest
# @raise [Errno::ENOENT] if a local path does not exist
# @api private
def make_files_hash(locals, remote)
  locals.each_with_object(Hash.new) do |local, hash|
    expanded = File.expand_path(local)
    # expand_path strips a trailing separator; restore it so directory
    # intent ("path/") is preserved.
    expanded += local[-1] if local.end_with?("/", "\\")

    if File.file?(expanded)
      add_file_hash!(hash, expanded, remote)
    elsif File.directory?(expanded)
      add_directory_hash!(hash, expanded, remote)
    else
      raise Errno::ENOENT, "No such file or directory #{expanded}"
    end
  end
end
|
325
|
+
|
326
|
+
# @return [String] the MD5 digest of a local file
|
327
|
+
# @api private
|
328
|
+
# Computes the MD5 digest of a local file.
#
# @param local [String] path to a local file
# @return [String] the hex-encoded MD5 digest
# @api private
def md5sum(local)
  digest = Digest::MD5.file(local)
  digest.hexdigest
end
|
331
|
+
|
332
|
+
# Destructively merges a report Hash into an existing files Hash.
|
333
|
+
# **Note:** this method mutates the files Hash.
|
334
|
+
#
|
335
|
+
# @param files [Hash] files hash, keyed by the local MD5 digest
|
336
|
+
# @param report [Hash] report hash, keyed by the local MD5 digest
|
337
|
+
# @api private
|
338
|
+
# Destructively folds a report Hash into a files Hash: entries sharing
# an MD5 key have their inner hashes merged (report values win).
# **Note:** this method mutates the files Hash.
#
# @param files [Hash] files hash, keyed by the local MD5 digest
# @param report [Hash] report hash, keyed by the local MD5 digest
# @api private
def merge_with_report!(files, report)
  files.merge!(report) do |_md5, existing, incoming|
    existing.merge(incoming)
  end
end
|
341
|
+
|
342
|
+
# @param depth [Integer] number of padding characters (default: `0`)
|
343
|
+
# @return [String] a whitespace padded string of the given length
|
344
|
+
# @api private
|
345
|
+
# Returns a run of spaces of the given length.
#
# @param depth [Integer] number of padding characters (default: `0`)
# @return [String] a whitespace padded string of the given length
# @api private
def pad(depth = 0)
  Array.new(depth, " ").join
end
|
348
|
+
|
349
|
+
# Parses response of a PowerShell script or CMD command which contains
|
350
|
+
# a CSV-formatted document in the standard output stream.
|
351
|
+
#
|
352
|
+
# @param output [WinRM::Output] output object with stdout, stderr, and
|
353
|
+
# exit code
|
354
|
+
# @return [Hash] report hash, keyed by the local MD5 digest
|
355
|
+
# @api private
|
356
|
+
# Parses a PowerShell/CMD response whose stdout carries a CSV document
# into a report Hash keyed by the "src_md5" column.
#
# @param output [WinRM::Output] output object with stdout, stderr, and
#   exit code
# @return [Hash] report hash, keyed by the local MD5 digest
# @raise [FileTransporterFailed] if the remote exit code is nonzero
# @api private
def parse_response(output)
  exitcode = output[:exitcode]
  if exitcode != 0
    raise FileTransporterFailed, "[#{self.class}] Upload failed " \
      "(exitcode: #{exitcode})\n#{output.stderr}"
  end
  rows = CSV.parse(output.stdout, :headers => true).map(&:to_hash)
  # Normalize blank CSV cells to nil for consistent lookups.
  rows.each { |row| row.each { |key, value| row[key] = nil if value == "" } }
  rows.each_with_object({}) { |row, memo| memo[row.fetch("src_md5")] = row }
end
|
365
|
+
|
366
|
+
# Converts a Ruby hash into a PowerShell hash table, represented in a
|
367
|
+
# String.
|
368
|
+
#
|
369
|
+
# @param obj [Object] source Hash or object when used in recursive
|
370
|
+
# calls
|
371
|
+
# @param depth [Integer] padding depth, used in recursive calls
|
372
|
+
# (default: `0`)
|
373
|
+
# @return [String] a PowerShell hash table
|
374
|
+
# @api private
|
375
|
+
# Renders a Ruby object as a PowerShell literal: Hashes become nested
# `@{ ... }` hash tables, everything else a double-quoted string.
#
# @param obj [Object] source Hash or object when used in recursive
#   calls
# @param depth [Integer] padding depth, used in recursive calls
#   (default: `0`)
# @return [String] a PowerShell hash table
# @api private
def ps_hash(obj, depth = 0)
  return %{"#{obj}"} unless obj.is_a?(Hash)

  entries = obj.map do |key, value|
    "#{pad(depth + 2)}#{ps_hash(key)} = #{ps_hash(value, depth + 2)}"
  end
  "@{\n#{entries.join("\n")}\n#{pad(depth)}}"
end
|
384
|
+
|
385
|
+
# Uploads an IO stream to a Base64-encoded destination file.
|
386
|
+
#
|
387
|
+
# **Implementation Note:** Some of the code in this method may appear
|
388
|
+
# slightly too dense and while adding additional variables would help,
|
389
|
+
# the code is written very precisely to avoid unwanted allocations
|
390
|
+
# which will bloat the Ruby VM's object space (and memory footprint).
|
391
|
+
# The goal here is to stream potentially large files to a remote host
|
392
|
+
# while not loading the entire file into memory first, then Base64
|
393
|
+
# encoding it--duplicating the file in memory again.
|
394
|
+
#
|
395
|
+
# @param input_io [#read] a readable stream or object to be uploaded
|
396
|
+
# @param dest [String] path to the destination file on the remote host
|
397
|
+
# @return [Integer,Integer] the number of resulting upload chunks and
|
398
|
+
# the number of bytes transferred to the remote host
|
399
|
+
# @api private
|
400
|
+
def stream_upload(input_io, dest)
  # CMD expands %TEMP%, not the PowerShell-style $env:TEMP.
  dest_cmd = dest.sub("$env:TEMP", "%TEMP%")
  # Reading (MAX/4)*3 raw bytes yields at most MAX_ENCODED_WRITE Base64
  # characters per chunk, keeping each echo under the CMD line limit.
  read_size = (MAX_ENCODED_WRITE.to_i / 4) * 3
  chunk, bytes = 1, 0
  buffer = ""
  service.run_cmd(%{echo|set /p=>"#{dest_cmd}"}) # truncate empty file
  # IO#read with a buffer argument reuses the same String, avoiding one
  # allocation per chunk for potentially large files.
  while input_io.read(read_size, buffer)
    # Approximate encoded size; close enough for progress reporting.
    bytes += (buffer.bytesize / 3 * 4)
    service.run_cmd([buffer].pack(BASE64_PACK).
      insert(0, "echo ").concat(%{ >> "#{dest_cmd}"}))
    debug { "Wrote chunk #{chunk} for #{dest}" } if chunk % 25 == 0
    chunk += 1
  end
  buffer = nil # rubocop:disable Lint/UselessAssignment

  [chunk - 1, bytes]
end
|
417
|
+
|
418
|
+
# Uploads a local file to a Base64-encoded temporary file.
|
419
|
+
#
|
420
|
+
# @param src [String] path to a local file
|
421
|
+
# @param tmpfile [String] path to the temporary file on the remote
|
422
|
+
# host
|
423
|
+
# @return [Integer,Integer] the number of resulting upload chunks and
|
424
|
+
# the number of bytes transferred to the remote host
|
425
|
+
# @api private
|
426
|
+
# Uploads one local file to a Base64-encoded remote temporary file,
# timing the transfer for the debug log.
#
# @param src [String] path to a local file
# @param tmpfile [String] path to the temporary file on the remote
#   host
# @return [Integer,Integer] the number of resulting upload chunks and
#   the number of bytes transferred to the remote host
# @api private
def stream_upload_file(src, tmpfile)
  debug { "Uploading #{src} to encoded tmpfile #{tmpfile}" }
  chunks = 0
  bytes = 0
  elapsed = Benchmark.measure do
    File.open(src, "rb") { |io| chunks, bytes = stream_upload(io, tmpfile) }
  end
  debug {
    "Finished uploading #{src} to encoded tmpfile #{tmpfile} " \
    "(#{bytes.to_f / 1000} KB over #{chunks} chunks) " \
    "in #{duration(elapsed.real)}"
  }

  [chunks, bytes]
end
|
442
|
+
|
443
|
+
# Uploads a collection of "dirty" files to the remote host as
|
444
|
+
# Base64-encoded temporary files. A "dirty" file is one which has the
|
445
|
+
# `"chk_dirty"` option set to `"True"` in the incoming files Hash.
|
446
|
+
#
|
447
|
+
# @param files [Hash] files hash, keyed by the local MD5 digest
|
448
|
+
# @return [Hash] a report hash, keyed by the local MD5 digest
|
449
|
+
# @api private
|
450
|
+
# Uploads every "dirty" file (those with "chk_dirty" set to "True") to
# a Base64-encoded remote temporary file; clean files are skipped.
#
# @param files [Hash] files hash, keyed by the local MD5 digest
# @return [Hash] a report hash, keyed by the local MD5 digest
# @api private
def stream_upload_files(files)
  files.each_with_object(Hash.new) do |(md5, data), response|
    # Directories upload their Zip artifact; plain files their source.
    src = data.fetch("src_zip", data["src"])
    unless data["chk_dirty"] == "True"
      debug { "File #{data["dst"]} is up to date, skipping" }
      next
    end

    tmpfile = "$env:TEMP\\b64-#{md5}.txt"
    response[md5] = { "tmpfile" => tmpfile }
    chunks, bytes = stream_upload_file(src, tmpfile)
    response[md5]["chunks"] = chunks
    response[md5]["xfered"] = bytes
  end
end
|
466
|
+
end
|
467
|
+
end
|
468
|
+
end
|