synqa 0.0.0
Sign up to get free protection for your applications and to get access to all the features.
- data/.document +5 -0
- data/Gemfile +13 -0
- data/Gemfile.lock +20 -0
- data/LICENSE.txt +674 -0
- data/README.rdoc +31 -0
- data/Rakefile +53 -0
- data/VERSION +1 -0
- data/_project.el +9 -0
- data/examples/sample-rakefile +63 -0
- data/examples/synqa-useage.rb +30 -0
- data/lib/synqa.rb +738 -0
- data/test/helper.rb +18 -0
- data/test/test_synqa.rb +37 -0
- metadata +112 -0
data/README.rdoc
ADDED
@@ -0,0 +1,31 @@
|
|
1
|
+
= synqa
|
2
|
+
|
3
|
+
*Synqa* is a simple file syncing tool that works over SSH, and is designed
|
4
|
+
primarily for maintaining static websites. It uses a hash function to
|
5
|
+
determine which files don't need to be copied because the destination copy
|
6
|
+
is already identical to the source copy.
|
7
|
+
|
8
|
+
I wrote it for two main reasons:
|
9
|
+
|
10
|
+
* I couldn't get *rsync* to work on the combination of Cygwin and my
|
11
|
+
hosting provider, and the rsync error messages were not very informative.
|
12
|
+
* It was an opportunity to learn about SSH and how to use SSH and SCP with Ruby.
|
13
|
+
|
14
|
+
== Dependencies of *synqa* are:
|
15
|
+
|
16
|
+
* Ruby 1.9.2
|
17
|
+
* An SSH client. I use *plink*.
|
18
|
+
* An SCP client. I use *pscp*.
|
19
|
+
|
20
|
+
For some sample code, see <b>examples/synqa-useage.rb</b> and <b>examples/sample-rakefile</b>.
|
21
|
+
|
22
|
+
== Licence
|
23
|
+
|
24
|
+
Synqa is licensed under the GNU General Public License version 3.
|
25
|
+
|
26
|
+
== Notes and Issues
|
27
|
+
|
28
|
+
* *Synqa* has not been tested (or even designed to work) with file names
|
29
|
+
containing whitespace or non-ASCII characters. Typically this doesn't matter for
|
30
|
+
many static websites, but it will reduce the tool's usefulness as a general purpose
|
31
|
+
backup tool.
|
data/Rakefile
ADDED
@@ -0,0 +1,53 @@
|
|
1
|
+
require 'rubygems'
require 'bundler'
begin
  Bundler.setup(:default, :development)
rescue Bundler::BundlerError => e
  $stderr.puts e.message
  $stderr.puts "Run `bundle install` to install missing gems"
  exit e.status_code
end
require 'rake'

require 'jeweler'
Jeweler::Tasks.new do |gem|
  # gem is a Gem::Specification... see http://docs.rubygems.org/read/chapter/20 for more options
  gem.name = "synqa"
  gem.homepage = "http://github.com/pdorrell/synqa"
  # Declared licence now matches README.rdoc and the bundled LICENSE.txt
  # (GNU GPL version 3); this previously said "MIT", contradicting both.
  gem.license = "GPL-3.0"
  gem.summary = %Q{Sync files from a local directory to a remote directory via SSH/SCP}
  gem.description = %Q{Sync files from a local directory to a remote directory via SSH/SCP}
  gem.email = "http://www.1729.com/email.html"
  gem.authors = ["Philip Dorrell"]
  # Include your dependencies below. Runtime dependencies are required when using your gem,
  # and development dependencies are only needed for development (ie running rake tasks, tests, etc)
  # gem.add_runtime_dependency 'jabber4r', '> 0.1'
  # gem.add_development_dependency 'rspec', '> 1.2.3'
end
Jeweler::RubygemsDotOrgTasks.new

require 'rake/testtask'
Rake::TestTask.new(:test) do |test|
  test.libs << 'lib' << 'test'
  test.pattern = 'test/**/test_*.rb'
  test.verbose = true
end

require 'rcov/rcovtask'
Rcov::RcovTask.new do |test|
  test.libs << 'test'
  test.pattern = 'test/**/test_*.rb'
  test.verbose = true
end

task :default => :test

require 'rake/rdoctask'
Rake::RDocTask.new do |rdoc|
  version = File.exist?('VERSION') ? File.read('VERSION') : ""

  rdoc.rdoc_dir = 'rdoc'
  rdoc.title = "synqa #{version}"
  rdoc.rdoc_files.include('README*')
  rdoc.rdoc_files.include('lib/**/*.rb')
end
|
data/VERSION
ADDED
@@ -0,0 +1 @@
|
|
1
|
+
0.0.0
|
data/_project.el
ADDED
@@ -0,0 +1,9 @@
|
|
1
|
+
|
2
|
+
;; Emacs project configuration, consumed by a custom "load-this-project"
;; setup: selects the Ruby 1.9 executable, defines how to run the project
;; (RunMain.rb in the project base directory), how to build it ("rake"),
;; and extra Ruby command-line arguments ("-I." adds the project dir to
;; the load path).
(load-this-project
 `( (:ruby-executable ,*ruby-1.9-executable*)
    (:run-project-command (ruby-run-file ,(concat (project-base-directory) "RunMain.rb")))
    (:build-function project-compile-with-command)
    (:compile-command "rake")
    (:ruby-args ("-I."))
    ) )
|
9
|
+
|
@@ -0,0 +1,63 @@
|
|
1
|
+
|
2
|
+
# A redacted rakefile example using Synqa
# In this example the source files are copied directly from ./src to
# yourusername@yourhostname.example.com/home/username/public.
#
# For a more complex static site, your rakefile might generate the site
# into an output directory and upload from there.
#
# Tasks:
#  clean - removes the cached content files
#  upload - syncs local content with remote dir (i.e. uploads new/changed files
#           and deletes remote files that don't exist in the local dir
#  uploaddry - a "dry run" for upload, doesn't actually upload or delete files

require 'rejinnirate-rake'
require 'synqa' # This assumes synqa is installed as a gem, otherwise require 'synqa.rb'
require 'digest/sha2'

STDOUT.sync = true

include Rejinnirate
include Synqa

BASE_DIR = File.dirname(__FILE__)

SRC_DIR = File.join(BASE_DIR, "src")
UPLOAD_DIR = SRC_DIR
# Directory holding the cached content-tree files written by synqa
SYNQA_DIR = File.join(BASE_DIR, "output", "synqa")

task :default => [:upload] do |t|
end

# Remote host: connect with plink/pscp, hash remote files with "sha256 -r"
REMOTE_HOST = SshContentHost.new("yourusername@yourhostname.example.com",
                                 Sha256Command.new(), "plink", "pscp")

REMOTE_SITE = RemoteContentLocation.new(REMOTE_HOST,
                                        "/home/username/public",
                                        File.join(SYNQA_DIR, "nearlyContent.txt"))

# Local site: hash with Digest::SHA256; excludes match Emacs backup files ("~")
LOCAL_SITE = LocalContentLocation.new(UPLOAD_DIR,
                                      Digest::SHA256,
                                      File.join(SYNQA_DIR, "localContent.txt"),
                                      :excludes => ["*\\~", "**/*\\~"])

task :init do |t|
  ensureDirectoryExists(SYNQA_DIR)
end

task :clean => [:init] do |t|
  SyncOperation.new(LOCAL_SITE, REMOTE_SITE).clearCachedContentFiles()
end

# List all files currently on the remote site (display only)
task :list do |t|
  REMOTE_SITE.listFiles()
end

task :uploaddry => [:init] do |t|
  SyncOperation.new(LOCAL_SITE, REMOTE_SITE).doSync(:dryRun => true)
end

task :upload => [:init] do |t|
  SyncOperation.new(LOCAL_SITE, REMOTE_SITE).doSync()
end
|
@@ -0,0 +1,30 @@
|
|
1
|
+
# Sample code for synqa useage -- you will need to fill in your own details

require 'synqa.rb'
require 'digest/sha2'

STDOUT.sync = true

include Synqa
sha256Sum = Sha256SumCommand.new() # sha256sum (with 2 characters between hash and file name)
sha256 = Sha256Command.new() # sha256 -r (with 1 space between hash and file name)

# Local directory to sync from, hashed locally with Digest::SHA256;
# the third argument is where the local content-tree cache is written.
localContentLocation = LocalContentLocation.new("c:/dev/src/project",
                                                Digest::SHA256,
                                                "c:/temp/synqa/local.project.content.cache.txt")

remoteHost = SshContentHost.new("username@host.example.com",
                                sha256, "plink", "pscp")

# Note: the specification of plink & pscp assumes that keys are managed with Pageant, and therefore
# do not need to be specified on the command line.

# Remote directory to sync to, with its own content-tree cache file.
remoteContentLocation = RemoteContentLocation.new(remoteHost,
                                                  "/home/username/public",
                                                  "c:/temp/synqa/remote.project.content.cache.txt")

# Note: the cache files are currently written, but not yet used to speed up the sync

syncOperation = SyncOperation.new(localContentLocation, remoteContentLocation)

syncOperation.doSync(:dryRun => true) # set to false to make it actually happen
|
data/lib/synqa.rb
ADDED
@@ -0,0 +1,738 @@
|
|
1
|
+
require 'fileutils'
require 'time'
|
2
|
+
|
3
|
+
module Synqa
|
4
|
+
|
5
|
+
# Check the exit status of the most recently executed child process ($?).
# Raises a RuntimeError, prefixed with the given description, if the process
# did not exit normally or exited with a non-zero status.
def checkProcessStatus(description)
  status = $?
  raise "#{description}: process did not exit normally" unless status.exited?
  exitCode = status.exitstatus
  raise "#{description}: exit status = #{exitCode}" if exitCode != 0
end
|
15
|
+
|
16
|
+
# Value object pairing a file's path (relative to some base directory)
# with the hash of that file's contents.
class RelativePathWithHash
  # the file path relative to the base directory
  attr_reader :relativePath
  # the hash of the file's contents
  attr_reader :hash

  def initialize(relativePath, hash)
    @relativePath = relativePath
    @hash = hash
  end

  # Human-readable representation for debugging.
  def inspect
    "RelativePathWithHash[#{relativePath}, #{hash}]"
  end
end
|
28
|
+
|
29
|
+
# Describes an external file-hashing command (e.g. sha256sum) and knows how
# to parse one line of its output into a RelativePathWithHash.
class HashCommand

  # command:   the hash command as an array of words
  # length:    number of characters in the hash value at the start of each line
  # spacerLen: number of characters between the hash and the file name
  attr_reader :command, :length, :spacerLen

  def initialize(command, length, spacerLen)
    @command = command
    @length = length
    @spacerLen = spacerLen
  end

  # Parse one output line of the hash command into a RelativePathWithHash
  # whose path is relative to baseDir. Raises if the file is not under baseDir.
  def parseFileHashLine(baseDir, fileHashLine)
    hashValue = fileHashLine[0...length]
    fullPath = fileHashLine[(length + spacerLen)..-1]
    unless fullPath.start_with?(baseDir)
      raise "File #{fullPath} from hash line is not in base dir #{baseDir}"
    end
    RelativePathWithHash.new(fullPath[baseDir.length..-1], hashValue)
  end

  # The command as a single space-joined string.
  def to_s
    command.join(" ")
  end
end
|
53
|
+
|
54
|
+
# Hash command for "sha256sum": 64 hex digits, then 2 characters before
# the file name.
class Sha256SumCommand<HashCommand
  def initialize
    super(["sha256sum"], 64, 2)
  end
end
|
59
|
+
|
60
|
+
# Hash command for "sha256 -r": 64 hex digits, then 1 character before
# the file name.
class Sha256Command<HashCommand
  def initialize
    super(["sha256", "-r"], 64, 1)
  end
end
|
65
|
+
|
66
|
+
# Return baseDir guaranteed to end with a trailing "/".
def normalisedDir(baseDir)
  baseDir.end_with?("/") ? baseDir : "#{baseDir}/"
end
|
69
|
+
|
70
|
+
# A source of directory/file content which is listed and hashed by executing
# "find" and a hash command. By default commands run locally via IO.popen
# (getCommandOutput); subclasses (e.g. SshContentHost) override the listing
# methods to run the same commands remotely.
class DirContentHost

  # hashCommand: the HashCommand object used to hash files
  # pathPrefix:  optional prefix prepended to executed command names
  attr_reader :hashCommand, :pathPrefix

  def initialize(hashCommand, pathPrefix = "")
    @hashCommand = hashCommand
    @pathPrefix = pathPrefix
  end

  # Command (array of words) listing all sub-directories of baseDir.
  def findDirectoriesCommand(baseDir)
    return ["#{@pathPrefix}find", baseDir, "-type", "d", "-print"]
  end

  # Return relative paths of all directories under baseDir.
  # Raises if "find" outputs a line that does not start with baseDir.
  def listDirectories(baseDir)
    baseDir = normalisedDir(baseDir)
    command = findDirectoriesCommand(baseDir)
    output = getCommandOutput(command)
    directories = []
    baseDirLen = baseDir.length
    puts "Listing directories ..."
    while (line = output.gets)
      line = line.chomp
      puts " #{line}"
      if line.start_with?(baseDir)
        directories << line[baseDirLen..-1]
      else
        raise "Directory #{line} is not a sub-directory of base directory #{baseDir}"
      end
    end
    output.close()
    checkProcessStatus(command)
    return directories
  end

  # Command (array of words) listing all files under baseDir.
  def findFilesCommand(baseDir)
    return ["#{@pathPrefix}find", baseDir, "-type", "f", "-print"]
  end

  # Return RelativePathWithHash objects for all files under baseDir.
  # Relies on listFileHashLines, which subclasses must provide.
  def listFileHashes(baseDir)
    baseDir = normalisedDir(baseDir)
    fileHashes = []
    listFileHashLines(baseDir) do |fileHashLine|
      fileHash = self.hashCommand.parseFileHashLine(baseDir, fileHashLine)
      if fileHash != nil
        fileHashes << fileHash
      end
    end
    return fileHashes
  end

  # Execute a command (array of words) and return an IO reading its output.
  def getCommandOutput(command)
    puts "#{command.inspect} ..."
    return IO.popen(command)
  end

  # Build a ContentTree of all directories and file hashes under baseDir,
  # time-stamped with the current UTC time.
  def getContentTree(baseDir)
    contentTree = ContentTree.new()
    contentTree.time = Time.now.utc
    for dir in listDirectories(baseDir)
      contentTree.addDir(dir)
    end
    for fileHash in listFileHashes(baseDir)
      contentTree.addFile(fileHash.relativePath, fileHash.hash)
    end
    return contentTree
  end
end
|
137
|
+
|
138
|
+
# A DirContentHost whose listing/hash commands are executed on a remote host
# over SSH (e.g. via plink), with files copied via an SCP program (e.g. pscp).
class SshContentHost<DirContentHost

  # shell:      the SSH client command (array of words)
  # scpProgram: the SCP client command (array of words)
  # host:       the remote host, e.g. "user@hostname"
  # scpCommandString: scpProgram joined into a single string
  attr_reader :shell, :scpProgram, :host, :scpCommandString

  def initialize(host, hashCommand, shell, scpProgram)
    super(hashCommand)
    @host = host
    # accept either a single command word or an array of words
    @shell = shell.is_a?(String) ? [shell] : shell
    @scpProgram = scpProgram.is_a?(String) ? [scpProgram] : scpProgram
    @scpCommandString = @scpProgram.join(" ")
  end

  # A human-readable description of this host + a base directory.
  def locationDescriptor(baseDir)
    baseDir = normalisedDir(baseDir)
    return "#{host}:#{baseDir} (connect = #{shell}/#{scpProgram}, hashCommand = #{hashCommand})"
  end

  # Execute a command remotely via SSH, yielding each chomped output line.
  # When dryRun is true, only echoes what would be executed.
  def executeRemoteCommand(commandString, dryRun = false)
    puts "SSH #{host} (#{shell.join(" ")}): executing #{commandString}"
    if not dryRun
      output = getCommandOutput(shell + [host, commandString])
      while (line = output.gets)
        yield line.chomp
      end
      output.close()
      checkProcessStatus("SSH #{host} #{commandString}")
    end
  end

  # Execute a remote command, echoing its output to stdout.
  def ssh(commandString, dryRun = false)
    executeRemoteCommand(commandString, dryRun) do |line|
      puts line
    end
  end

  # Return relative paths of all remote directories under baseDir.
  def listDirectories(baseDir)
    baseDir = normalisedDir(baseDir)
    puts "Listing directories ..."
    directories = []
    baseDirLen = baseDir.length
    executeRemoteCommand(findDirectoriesCommand(baseDir).join(" ")) do |line|
      puts " #{line}"
      if line.start_with?(baseDir)
        directories << line[baseDirLen..-1]
      else
        raise "Directory #{line} is not a sub-directory of base directory #{baseDir}"
      end
    end
    return directories
  end

  # Yield one remote hash-command output line per file under baseDir
  # (find ... | xargs -r <hashCommand>).
  def listFileHashLines(baseDir)
    baseDir = normalisedDir(baseDir)
    remoteFileHashLinesCommand = findFilesCommand(baseDir) + ["|", "xargs", "-r"] + @hashCommand.command
    executeRemoteCommand(remoteFileHashLinesCommand.join(" ")) do |line|
      puts " #{line}"
      yield line
    end
  end

  # List all remote files under baseDir (display only).
  def listFiles(baseDir)
    baseDir = normalisedDir(baseDir)
    executeRemoteCommand(findFilesCommand(baseDir).join(" ")) do |line|
      puts " #{line}"
    end
  end

  # SCP-style path for a full remote path, i.e. "user@host:/path".
  def getScpPath(path)
    return host + ":" + path
  end
end
|
209
|
+
|
210
|
+
# A single file within a content tree: its name, content hash, parent path
# elements, and any pending sync operations (copy and/or delete markers).
class FileContent
  attr_reader :name, :hash, :parentPathElements, :copyDestination, :toBeDeleted

  def initialize(name, hash, parentPathElements)
    @name = name
    @hash = hash
    @parentPathElements = parentPathElements
    # no copy or delete is scheduled initially
    @copyDestination = nil
    @toBeDeleted = false
  end

  # Schedule this file to be copied into the given destination directory.
  def markToCopy(destinationDirectory)
    @copyDestination = destinationDirectory
  end

  # Schedule this file for deletion.
  def markToDelete
    @toBeDeleted = true
  end

  def to_s
    "#{name} (#{hash})"
  end

  # Full relative path: parent path elements plus the file name, "/"-joined.
  def fullPath
    (parentPathElements + [name]).join("/")
  end
end
|
237
|
+
|
238
|
+
# A tree of directories and files (with content hashes) representing the
# content of a location. Can be displayed, written to / read from a cache
# file, and marked with the copy/delete operations needed to sync it to
# another ContentTree.
class ContentTree
  attr_reader :name, :pathElements, :files, :dirs, :fileByName, :dirByName
  attr_reader :copyDestination, :toBeDeleted
  # the time the content was scanned (or nil if unknown)
  attr_accessor :time

  def initialize(name = nil, parentPathElements = nil)
    @name = name
    # the root tree (name == nil) has an empty path
    @pathElements = name == nil ? [] : parentPathElements + [name]
    @files = []
    @dirs = []
    @fileByName = {}
    @dirByName = {}
    @copyDestination = nil
    @toBeDeleted = false
    @time = nil
  end

  # Schedule this directory to be copied into the given destination directory.
  def markToCopy(destinationDirectory)
    @copyDestination = destinationDirectory
  end

  # Schedule this directory for deletion.
  def markToDelete
    @toBeDeleted = true
  end

  # Full relative path of this directory, "/"-joined.
  def fullPath
    return @pathElements.join("/")
  end

  # Coerce a path given as a "/"-separated string (or already as an array)
  # into an array of path elements.
  def getPathElements(path)
    return path.is_a?(String) ? (path == "" ? [] : path.split("/")) : path
  end

  # Return the sub-tree for the named immediate sub-directory, creating it
  # (and registering it in dirs/dirByName) if it doesn't exist yet.
  def getContentTreeForSubDir(subDir)
    dirContentTree = dirByName.fetch(subDir, nil)
    if dirContentTree == nil
      dirContentTree = ContentTree.new(subDir, @pathElements)
      dirs << dirContentTree
      dirByName[subDir] = dirContentTree
    end
    return dirContentTree
  end

  # Add a directory (given by relative path) to the tree, creating any
  # intermediate directories.
  def addDir(dirPath)
    pathElements = getPathElements(dirPath)
    if pathElements.length > 0
      pathStart = pathElements[0]
      restOfPath = pathElements[1..-1]
      getContentTreeForSubDir(pathStart).addDir(restOfPath)
    end
  end

  # Recursively sort directories and files by name (in place).
  def sort!
    dirs.sort_by! {|dir| dir.name}
    files.sort_by! {|file| file.name}
    for dir in dirs do
      dir.sort!
    end
  end

  # Add a file (given by relative path) with its content hash, creating any
  # intermediate directories. Raises on an empty path.
  def addFile(filePath, hash)
    pathElements = getPathElements(filePath)
    if pathElements.length == 0
      raise "Invalid file path: #{filePath.inspect}"
    end
    if pathElements.length == 1
      fileName = pathElements[0]
      fileContent = FileContent.new(fileName, hash, @pathElements)
      files << fileContent
      fileByName[fileName] = fileContent
    else
      pathStart = pathElements[0]
      restOfPath = pathElements[1..-1]
      getContentTreeForSubDir(pathStart).addFile(restOfPath, hash)
    end
  end

  # Format used for the scan time in cache files and display.
  @@dateTimeFormat = "%Y-%m-%d %H:%M:%S.%L %z"

  # Print the tree in indented form, including any COPY/DELETE markers.
  def showIndented(name = "", indent = " ", currentIndent = "")
    if time != nil
      puts "#{currentIndent}[TIME: #{time.strftime(@@dateTimeFormat)}]"
    end
    if name != ""
      puts "#{currentIndent}#{name}"
    end
    if copyDestination != nil
      puts "#{currentIndent} [COPY to #{copyDestination.fullPath}]"
    end
    if toBeDeleted
      puts "#{currentIndent} [DELETE]"
    end
    nextIndent = currentIndent + indent
    for dir in dirs do
      dir.showIndented("#{dir.name}/", indent = indent, currentIndent = nextIndent)
    end
    for file in files do
      puts "#{nextIndent}#{file.name} - #{file.hash}"
      if file.copyDestination != nil
        puts "#{nextIndent} [COPY to #{file.copyDestination.fullPath}]"
      end
      if file.toBeDeleted
        puts "#{nextIndent} [DELETE]"
      end
    end
  end

  # Write the tree to an open file in the "T"/"D"/"F" line format read back
  # by readFromFile / readMapOfHashesFromFile.
  def writeLinesToFile(outFile, prefix = "")
    if time != nil
      outFile.puts("T #{time.strftime(@@dateTimeFormat)}\n")
    end
    for dir in dirs do
      outFile.puts("D #{prefix}#{dir.name}\n")
      dir.writeLinesToFile(outFile, "#{prefix}#{dir.name}/")
    end
    for file in files do
      outFile.puts("F #{file.hash} #{prefix}#{file.name}\n")
    end
  end

  # Write the tree to the named (cache) file.
  def writeToFile(fileName)
    puts "Writing content tree to file #{fileName} ..."
    File.open(fileName, "w") do |outFile|
      writeLinesToFile(outFile)
    end
  end

  # Line formats of the content tree cache file.
  @@dirLineRegex = /^D (.*)$/
  @@fileLineRegex = /^F ([^ ]*) (.*)$/
  @@timeRegex = /^T (.*)$/

  # Read a ContentTree back from a cache file written by writeToFile.
  # Raises on any line that is not a T/D/F line.
  def self.readFromFile(fileName)
    contentTree = ContentTree.new()
    puts "Reading content tree from #{fileName} ..."
    IO.foreach(fileName) do |line|
      dirLineMatch = @@dirLineRegex.match(line)
      if dirLineMatch
        dirName = dirLineMatch[1]
        contentTree.addDir(dirName)
      else
        fileLineMatch = @@fileLineRegex.match(line)
        if fileLineMatch
          hash = fileLineMatch[1]
          fileName = fileLineMatch[2]
          contentTree.addFile(fileName, hash)
        else
          timeLineMatch = @@timeRegex.match(line)
          if timeLineMatch
            timeString = timeLineMatch[1]
            contentTree.time = Time.strptime(timeString, @@dateTimeFormat)
          else
            raise "Invalid line in content tree file: #{line.inspect}"
          end
        end
      end
    end
    return contentTree
  end

  # Read [time, {relativeFilePath => hash}] from a cache file, ignoring
  # directory lines.
  def self.readMapOfHashesFromFile(fileName)
    mapOfHashes = {}
    time = nil
    # File.foreach closes the file when iteration finishes; the original
    # used File.open(fileName).each_line, which leaked the open File object.
    File.foreach(fileName) do |line|
      fileLineMatch = @@fileLineRegex.match(line)
      if fileLineMatch
        hash = fileLineMatch[1]
        fileName = fileLineMatch[2]
        mapOfHashes[fileName] = hash
      end
      timeLineMatch = @@timeRegex.match(line)
      if timeLineMatch
        timeString = timeLineMatch[1]
        time = Time.strptime(timeString, @@dateTimeFormat)
      end
    end
    return [time, mapOfHashes]
  end

  # Mark everything needed to make destination match self: copies of
  # new/changed items (on self) and deletes of extra items (on destination).
  def markSyncOperationsForDestination(destination)
    markCopyOperations(destination)
    destination.markDeleteOptions(self)
  end

  # Immediate sub-directory tree by name, or nil.
  def getDir(dir)
    return dirByName.fetch(dir, nil)
  end

  # Immediate file by name, or nil.
  def getFile(file)
    return fileByName.fetch(file, nil)
  end

  # Mark directories/files of self that need copying into destinationDir:
  # a whole directory if it's missing there, otherwise recurse; a file if
  # it's missing or its hash differs.
  def markCopyOperations(destinationDir)
    for dir in dirs
      destinationSubDir = destinationDir.getDir(dir.name)
      if destinationSubDir != nil
        dir.markCopyOperations(destinationSubDir)
      else
        dir.markToCopy(destinationDir)
      end
    end
    for file in files
      destinationFile = destinationDir.getFile(file.name)
      if destinationFile == nil or destinationFile.hash != file.hash
        file.markToCopy(destinationDir)
      end
    end
  end

  # Mark directories/files of self (the destination) that should be deleted
  # because they don't exist in sourceDir.
  def markDeleteOptions(sourceDir)
    for dir in dirs
      sourceSubDir = sourceDir.getDir(dir.name)
      if sourceSubDir == nil
        dir.markToDelete()
      else
        dir.markDeleteOptions(sourceSubDir)
      end
    end
    for file in files
      sourceFile = sourceDir.getFile(file.name)
      if sourceFile == nil
        file.markToDelete()
      end
    end
  end
end
|
463
|
+
|
464
|
+
# Base class for a content location (local or remote) which can optionally
# cache its content tree in a file.
class ContentLocation
  # name of the file used to cache the content tree (nil to disable caching)
  attr_reader :cachedContentFile

  def initialize(cachedContentFile)
    @cachedContentFile = cachedContentFile
  end

  # Return the cached content file name if it is specified and the file
  # exists, otherwise nil.
  def getExistingCachedContentTreeFile
    if cachedContentFile == nil
      puts "No cached content file specified for location"
      return nil
    elsif File.exist?(cachedContentFile) # File.exists? is deprecated and removed in Ruby 3.2
      return cachedContentFile
    else
      puts "Cached content file #{cachedContentFile} does not yet exist."
      return nil
    end
  end

  # Delete the cached content file, if one is specified and exists.
  def clearCachedContentFile
    if cachedContentFile and File.exist?(cachedContentFile)
      puts " deleting cached content file #{cachedContentFile} ..."
      File.delete(cachedContentFile)
    end
  end

  # Read and return the cached ContentTree, or nil if there is no cache file.
  def getCachedContentTree
    file = getExistingCachedContentTreeFile
    if file
      return ContentTree.readFromFile(file)
    else
      return nil
    end
  end

  # Read [time, {relativePath => hash}] from the cache file, or [nil, {}]
  # if there is no cache file.
  def getCachedContentTreeMapOfHashes
    file = getExistingCachedContentTreeFile
    if file
      puts "Reading cached file hashes from #{file} ..."
      return ContentTree.readMapOfHashesFromFile(file)
    else
      return [nil, {}]
    end
  end

end
|
510
|
+
|
511
|
+
# A content location on the local file system, hashed with a Ruby Digest
# class rather than an external command.
class LocalContentLocation<ContentLocation
  # baseDir:   the local base directory (normalised to end with "/")
  # hashClass: a Digest class (e.g. Digest::SHA256) used to hash file contents
  attr_reader :baseDir, :hashClass

  def initialize(baseDir, hashClass, cachedContentFile = nil, options = {})
    super(cachedContentFile)
    @baseDir = normalisedDir(baseDir)
    @baseDirLen = @baseDir.length
    @hashClass = hashClass
    # glob patterns (matched with File.fnmatch against relative paths)
    # for files to exclude from the content tree
    @excludeGlobs = options.fetch(:excludes, [])
  end

  # Strip the base directory from an absolute file name.
  # Raises if the file is not under the base directory.
  def getRelativePath(fileName)
    if fileName.start_with? @baseDir
      return fileName[@baseDirLen..-1]
    else
      raise "File name #{fileName} does not start with #{baseDir}"
    end
  end

  # For a local location the SCP path is just the full local path.
  def getScpPath(relativePath)
    return getFullPath(relativePath)
  end

  # Absolute path for a path relative to the base directory.
  def getFullPath(relativePath)
    return @baseDir + relativePath
  end

  # True if the relative path matches any of the configured exclude globs.
  def fileIsExcluded(relativeFile)
    for excludeGlob in @excludeGlobs
      if File.fnmatch(excludeGlob, relativeFile)
        puts " file #{relativeFile} excluded by glob #{excludeGlob}"
        return true
      end
    end
    return false
  end

  # Scan the base directory and build its ContentTree. A file's hash is
  # reused from the cached map when the cache is newer than the file's
  # mtime; otherwise the file is re-hashed with hashClass. The resulting
  # tree is sorted and (if caching is enabled) written back to the cache.
  def getContentTree
    cachedTimeAndMapOfHashes = getCachedContentTreeMapOfHashes
    cachedTime = cachedTimeAndMapOfHashes[0]
    cachedMapOfHashes = cachedTimeAndMapOfHashes[1]
    contentTree = ContentTree.new()
    contentTree.time = Time.now.utc
    #puts "LocalContentLocation.getContentTree for baseDir #{baseDir} ..."
    # FNM_DOTMATCH makes the glob include dot-files; "." and ".." entries
    # are skipped explicitly below
    for fileOrDir in Dir.glob(baseDir + "**/*", File::FNM_DOTMATCH)
      if not (fileOrDir.end_with?("/.") or fileOrDir.end_with?("/.."))
        relativePath = getRelativePath(fileOrDir)
        #puts " #{relativePath}"
        if File.directory? fileOrDir
          contentTree.addDir(relativePath)
        else
          if not fileIsExcluded(relativePath)
            cachedDigest = cachedMapOfHashes[relativePath]
            # reuse the cached hash only if the file hasn't changed since
            # the cache was written
            if cachedTime and cachedDigest and File.stat(fileOrDir).mtime < cachedTime
              digest = cachedDigest
            else
              digest = hashClass.file(fileOrDir).hexdigest
            end
            contentTree.addFile(relativePath, digest)
          end
        end
      end
    end
    contentTree.sort!
    if cachedContentFile != nil
      contentTree.writeToFile(cachedContentFile)
    end
    return contentTree
  end
end
|
581
|
+
|
582
|
+
# A content location on a remote host, accessed via an SshContentHost.
class RemoteContentLocation<ContentLocation
  # host:    the SshContentHost used to connect to the remote system
  # baseDir: the remote base directory (normalised to end with "/")
  attr_reader :host, :baseDir

  def initialize(host, baseDir, cachedContentFile = nil)
    super(cachedContentFile)
    @host = host
    @baseDir = normalisedDir(baseDir)
  end

  # List all remote files under the base directory (display only).
  def listFiles()
    host.listFiles(baseDir)
  end

  # The SCP command string (e.g. "pscp") of the remote host.
  def scpCommandString
    return host.scpCommandString
  end

  # Absolute remote path for a path relative to the base directory.
  def getFullPath(relativePath)
    return baseDir + relativePath
  end

  # SCP-style path ("user@host:/path") for a relative path.
  def getScpPath(relativePath)
    return host.getScpPath(getFullPath(relativePath))
  end

  # Execute a command on the remote host (echoed only, when dryRun is true).
  def ssh(commandString, dryRun = false)
    host.ssh(commandString, dryRun)
  end

  # Relative paths of all remote directories under the base directory.
  def listDirectories
    return host.listDirectories(baseDir)
  end

  # RelativePathWithHash objects for all remote files under the base directory.
  def listFileHashes
    return host.listFileHashes(baseDir)
  end

  def to_s
    return host.locationDescriptor(baseDir)
  end

  # Return the remote content tree: read from the cache file if it exists,
  # otherwise scan the remote directory (and write the cache if enabled).
  def getContentTree
    # File.exist? replaces the deprecated File.exists? (removed in Ruby 3.2)
    if cachedContentFile and File.exist?(cachedContentFile)
      return ContentTree.readFromFile(cachedContentFile)
    else
      contentTree = host.getContentTree(baseDir)
      contentTree.sort!
      if cachedContentFile != nil
        contentTree.writeToFile(cachedContentFile)
      end
      return contentTree
    end
  end

end
|
637
|
+
|
638
|
+
# Coordinates a sync from a source ContentLocation to a destination
# ContentLocation: builds both content trees, marks the required copy and
# delete operations, and executes them via SCP and remote "rm" over SSH.
class SyncOperation
  attr_reader :sourceLocation, :destinationLocation

  def initialize(sourceLocation, destinationLocation)
    @sourceLocation = sourceLocation
    @destinationLocation = destinationLocation
  end

  # Read (or compute) the content trees of both locations.
  def getContentTrees
    @sourceContent = @sourceLocation.getContentTree()
    @destinationContent = @destinationLocation.getContentTree()
  end

  # Mark which files/directories need copying or deleting, then display
  # both marked trees.
  def markSyncOperations
    @sourceContent.markSyncOperationsForDestination(@destinationContent)
    puts " ================================================ "
    puts "After marking for sync --"
    puts ""
    puts "Local:"
    @sourceContent.showIndented()
    puts ""
    puts "Remote:"
    @destinationContent.showIndented()
  end

  # Delete the cached content files (if any) of both locations.
  def clearCachedContentFiles
    @sourceLocation.clearCachedContentFile()
    @destinationLocation.clearCachedContentFile()
  end

  # Perform the sync. Options:
  #   :full   - clear both cached content files first, forcing a full re-scan
  #   :dryRun - show what would be done without copying or deleting anything
  def doSync(options = {})
    if options[:full]
      clearCachedContentFiles()
    end
    getContentTrees()
    markSyncOperations()
    dryRun = options[:dryRun]
    if not dryRun
      # the remote cache is about to become stale, so invalidate it first
      @destinationLocation.clearCachedContentFile()
    end
    doAllCopyOperations(dryRun)
    doAllDeleteOperations(dryRun)
    # After a successful sync the remote content matches the local content,
    # so the local cache file can serve as the new remote cache file.
    # (File.exist? replaces the deprecated File.exists?, removed in Ruby 3.2;
    # FileUtils is provided by the file-level require of 'fileutils'.)
    if (@destinationLocation.cachedContentFile and @sourceLocation.cachedContentFile and
        File.exist?(@sourceLocation.cachedContentFile))
      FileUtils::Verbose.cp(@sourceLocation.cachedContentFile, @destinationLocation.cachedContentFile)
    end
  end

  # Copy everything in the source tree marked for copying.
  def doAllCopyOperations(dryRun)
    doCopyOperations(@sourceContent, @destinationContent, dryRun)
  end

  # Delete everything in the destination tree marked for deletion.
  def doAllDeleteOperations(dryRun)
    doDeleteOperations(@destinationContent, dryRun)
  end

  # Execute a local shell command (unless this is a dry run) and check its
  # exit status.
  def executeCommand(command, dryRun)
    puts "EXECUTE: #{command}"
    if not dryRun
      system(command)
      checkProcessStatus(command)
    end
  end

  # Recursively copy marked directories (scp -r) and files (scp).
  # NOTE(review): paths are interpolated unquoted into the shell command,
  # so this assumes file names without whitespace (as stated in the README).
  def doCopyOperations(sourceContent, destinationContent, dryRun)
    for dir in sourceContent.dirs do
      if dir.copyDestination != nil
        sourcePath = sourceLocation.getScpPath(dir.fullPath)
        destinationPath = destinationLocation.getScpPath(dir.copyDestination.fullPath)
        executeCommand("#{destinationLocation.scpCommandString} -r #{sourcePath} #{destinationPath}", dryRun)
      else
        doCopyOperations(dir, destinationContent.getDir(dir.name), dryRun)
      end
    end
    for file in sourceContent.files do
      if file.copyDestination != nil
        sourcePath = sourceLocation.getScpPath(file.fullPath)
        destinationPath = destinationLocation.getScpPath(file.copyDestination.fullPath)
        executeCommand("#{destinationLocation.scpCommandString} #{sourcePath} #{destinationPath}", dryRun)
      end
    end
  end

  # Recursively delete marked directories (rm -r) and files (rm) on the
  # destination via SSH.
  def doDeleteOperations(destinationContent, dryRun)
    for dir in destinationContent.dirs do
      if dir.toBeDeleted
        dirPath = destinationLocation.getFullPath(dir.fullPath)
        destinationLocation.ssh("rm -r #{dirPath}", dryRun)
      else
        doDeleteOperations(dir, dryRun)
      end
    end
    for file in destinationContent.files do
      if file.toBeDeleted
        filePath = destinationLocation.getFullPath(file.fullPath)
        destinationLocation.ssh("rm #{filePath}", dryRun)
      end
    end
  end
end
|
738
|
+
end
|