complearn 0.6.2
Sign up to get free protection for your applications and to get access to all the features.
- data/AUTHORS +13 -0
- data/COPYING +340 -0
- data/ChangeLog +0 -0
- data/INSTALL +231 -0
- data/Makefile +352 -0
- data/Makefile.am +76 -0
- data/Makefile.in +352 -0
- data/NEWS +7 -0
- data/README +0 -0
- data/aclocal.m4 +104 -0
- data/bin/Makefile +209 -0
- data/bin/Makefile.am +8 -0
- data/bin/Makefile.in +209 -0
- data/bin/labeltree +68 -0
- data/bin/labeltree.in +68 -0
- data/bin/makesvm +70 -0
- data/bin/makesvm.in +70 -0
- data/bin/maketree +98 -0
- data/bin/maketree.in +98 -0
- data/bin/ncd +43 -0
- data/bin/ncd.in +43 -0
- data/bin/ncdmatrix +54 -0
- data/bin/ncdmatrix.in +54 -0
- data/bin/ncdvector +50 -0
- data/bin/ncdvector.in +50 -0
- data/complearn-0.6.2.gem +0 -0
- data/complearn.gemspec +57 -0
- data/config.log +597 -0
- data/config.status +1082 -0
- data/configure +4922 -0
- data/configure.ac +91 -0
- data/confstat5FpLBf/config.h +65 -0
- data/confstat5FpLBf/subs-1.sed +50 -0
- data/confstat5FpLBf/subs-2.sed +13 -0
- data/confstat5FpLBf/subs.frag +0 -0
- data/confstat5FpLBf/subs.sed +59 -0
- data/confstat5FpLBf/undefs.sed +24 -0
- data/doc/FAQ.txt +67 -0
- data/doc/Makefile +286 -0
- data/doc/Makefile.am +11 -0
- data/doc/Makefile.in +286 -0
- data/doc/devguide.txt +15 -0
- data/doc/example.complearnrc +14 -0
- data/doc/examples.txt +35 -0
- data/doc/man/Makefile +255 -0
- data/doc/man/Makefile.am +11 -0
- data/doc/man/Makefile.in +255 -0
- data/doc/man/complearn.5 +91 -0
- data/doc/man/labeltree.1 +35 -0
- data/doc/man/makesvm.1 +60 -0
- data/doc/man/maketree.1 +58 -0
- data/doc/man/ncd.1 +51 -0
- data/doc/man/ncdmatrix.1 +40 -0
- data/doc/man/ncdvector.1 +42 -0
- data/doc/readme.txt +101 -0
- data/doc/userguide.txt +46 -0
- data/examples/genes/blueWhale.txt +1 -0
- data/examples/genes/cat.txt +1 -0
- data/examples/genes/chimpanzee.txt +1 -0
- data/examples/genes/finWhale.txt +1 -0
- data/examples/genes/graySeal.txt +1 -0
- data/examples/genes/harborSeal.txt +1 -0
- data/examples/genes/horse.txt +1 -0
- data/examples/genes/human.txt +1 -0
- data/examples/genes/mouse.txt +1 -0
- data/examples/genes/rat.txt +1 -0
- data/ext/Makefile +167 -0
- data/ext/Quartet.c +399 -0
- data/ext/Quartet.h +62 -0
- data/ext/TreeScore.c +244 -0
- data/ext/TreeScore.h +3 -0
- data/ext/config.h +65 -0
- data/ext/config.h.in +64 -0
- data/ext/extconf.rb +3 -0
- data/ext/lib/CompLearnLib/CLConfig.rb +241 -0
- data/ext/lib/CompLearnLib/CompressionObject.rb +59 -0
- data/ext/lib/CompLearnLib/CompressionTask.rb +99 -0
- data/ext/lib/CompLearnLib/DistMatrix.rb +18 -0
- data/ext/lib/CompLearnLib/FoundComp.rb +10 -0
- data/ext/lib/CompLearnLib/FoundComp.rb.in +10 -0
- data/ext/lib/CompLearnLib/Ncd.rb +248 -0
- data/ext/lib/CompLearnLib/RunEnv.rb +150 -0
- data/ext/lib/CompLearnLib/Task.rb +39 -0
- data/ext/lib/CompLearnLib/TaskMaster.rb +13 -0
- data/ext/lib/CompLearnLib/TaskMasterMPI.rb +112 -0
- data/ext/lib/CompLearnLib/TaskMasterSingle.rb +39 -0
- data/ext/lib/CompLearnLib/Tree.rb +300 -0
- data/install-sh +294 -0
- data/missing +336 -0
- data/mkinstalldirs +111 -0
- data/o +24 -0
- data/scripts/CompLearn.iss +89 -0
- data/scripts/CompLearn.iss.in +89 -0
- data/scripts/debian/changelog +6 -0
- data/scripts/debian/control +14 -0
- data/scripts/makeSetup.sh +23 -0
- data/scripts/makeSetup.sh.in +23 -0
- data/scripts/makedeb.zsh +46 -0
- data/scripts/makedeb.zsh.in +46 -0
- data/tests/alltests.rb +2 -0
- data/tests/bz2test.rb +516 -0
- data/tests/sshagent-test.rb +48 -0
- data/tests/tests.rb +275 -0
- metadata +164 -0
@@ -0,0 +1,150 @@
|
|
1
|
+
# Runtime-environment helpers for CompLearn: temporary-file allocation,
# directory scanning, LAM/MPI cluster start/stop, and ssh fan-out to the
# configured hosts.
module MRunEnv
  require 'CompLearnLib/CLConfig'

  # For now, use these paths
  # Names of the LAM/MPI toolchain commands (resolved via $PATH).  The
  # class is reopened further down with the methods that invoke them.
  class MPIRunner
    @@MPIRUBY='mpi_ruby'

    @@MPIRUN='mpirun'
    @@MPILAMBOOT='lamboot'
    @@MPIWIPE='wipe'
    @@MPIHALT='lamhalt'

  end

  # Change this by setting OUTPUTDIR in your shell environment
  class DirNames
    # Directory where result files are written; the OUTPUTDIR environment
    # variable overrides the hard-coded fallback path.
    def DirNames.outputDir()
      ENV['OUTPUTDIR'] || "/ufs/cilibrar/src/rubyquart/results"
    end
  end

  class RunEnv
    @@TMPDIR="/tmp"

    # Per-process counter used to build distinct temp-file names.
    @tmpCounter = 0
    # Returns a fresh name of the form /tmp/tmp-00000.dat and registers an
    # at_exit hook that unlinks the file if it still exists on shutdown.
    # NOTE(review): the counter is only unique within this process; two
    # concurrent processes could collide on the same filename.
    def RunEnv.getTemporaryFilename()
      fname = format("%s/tmp-%05d.dat", @@TMPDIR, @tmpCounter)
      @tmpCounter += 1
      at_exit { File.exist?(fname) && File.unlink(fname) }
      fname
    end
    # Returns an array of howMany fresh temporary filenames.
    def RunEnv.getTemporaryFilenames(howMany)
      results = [ ]
      howMany.times { results << RunEnv.getTemporaryFilename() }
      results
    end
    # Thin wrapper over Kernel#rand so callers share one RNG entry point.
    # Note: rand(0) returns a Float in [0,1) rather than an Integer
    # (mutateComplex in Tree relies on this).
    def RunEnv.zrand(n)
      rand(n)
    end
    # Sorted names of the subdirectories of dirname ('.' and '..' excluded).
    # NOTE(review): File.ftype does not follow symlinks, so symlinked
    # directories are skipped.
    def RunEnv.dirsInDir(dirname)
      dirs = [ ]
      for f in Dir.entries(dirname)
        dirs << f if File.ftype(dirname+"/"+f)=='directory' && f != '.' && f != '..'
      end
      dirs.sort
    end
    # Sorted names of the regular files directly inside dirname.
    def RunEnv.filesInDir(dirname)
      files = [ ]
      for f in Dir.entries(dirname)
        files << f if File.ftype(dirname+"/"+f)=='file'
      end
      files.sort
    end
  end

  # Methods for booting, halting, and running scripts on a LAM/MPI cluster.
  class MPIRunner
    # Lazily writes the configured host list to a temp file and caches the
    # filename in the class-instance variable @hostFilename.
    def MPIRunner.getGoodHostsFile()
      unless defined?(@hostFilename)
        @hostFilename = SSHAgent.writeGoodHostFile
      end
      @hostFilename
    end
    def MPIRunner.rebootMPI()
      MPIRunner.haltMPI
      MPIRunner.bootMPI
    end
    # Boots the LAM runtime on the hosts listed in the hosts file.
    def MPIRunner.bootMPI()
      MPIRunner.getGoodHostsFile
      result = `#{@@MPILAMBOOT} -x -v -s #{@hostFilename}`
      puts result
      @lamRunning = true
    end
    # True once bootMPI has run (and haltMPI has not run since).
    def MPIRunner.lamRunning?()
      defined?(@lamRunning) && @lamRunning
    end
    # Tears the LAM runtime down with `wipe`, then kills any straggler
    # daemons on every configured host over ssh.
    def MPIRunner.haltMPI()
      MPIRunner.getGoodHostsFile
      #result = `#{@@MPIHALT}`
      result = `#{@@MPIWIPE} -v #{@hostFilename}`
      SSHAgent.runEverywhere('"killall mpi_ruby;killall lamboot;killall lamd"')
      @lamRunning = false
    end
    # Stops a running MPI-Ruby job by halting the whole LAM runtime.
    def MPIRunner.stopMPIRubyScript()
      #Process.kill(9, pid)
      MPIRunner.haltMPI()
    end
    def MPIRunner.runMPIRubyScript(scriptArray) # returns PID of controller
      scriptName = scriptArray[0]
      scriptArgs = (scriptArray[1..-1]).join(' ')
      puts "Running script #{scriptName} on #{`hostname`}"
      MPIRunner.bootMPI unless lamRunning?
      # res = fork()
      # if (res)
      # res
      # else
      # NOTE(review): exec replaces the current process, so despite the
      # "returns PID" comment above, this call never returns.
      exec("#{@@MPIRUN} C #{@@MPIRUBY} #{scriptName} #{scriptArgs}")
      # end
    end
  end

  # ssh-based host probing and command fan-out.  The host list comes from
  # CLConfig.getDefaultConfig().hosts (first entry = primary host).
  class SSHAgent
    # True when passwordless ssh to the primary host works.
    def SSHAgent.isRunning?()
      SSHAgent.checkHost(SSHAgent.primaryHost)
    end
    # Round-trips `hostname -s` over ssh and compares the output with the
    # name we dialed, rejecting unreachable or misconfigured hosts.
    def SSHAgent.checkHost(hostname)
      puts "Checking #{hostname}"
      result = `unset DISPLAY ; setsid ssh #{hostname} hostname -s`
      result.gsub!(/\s/,'')
      hostname == result
    end
    def SSHAgent.primaryHost()
      CLConfig.getDefaultConfig().hosts[0]
    end
    # Runs cmd on host h via ssh; the host 'dorado' is hard-coded as
    # excluded (reason not recorded here).
    def SSHAgent.runAt(cmd, h)
      if (h == 'dorado')
        puts "Skipping running #{cmd} on dorado"
      else
        `unset DISPLAY ; setsid ssh #{h} #{cmd}`
      end
    end
    # Runs cmd on the primary host and every helper host.
    def SSHAgent.runEverywhere(cmd)
      [SSHAgent.primaryHost(), SSHAgent.helperHosts()].flatten.each { |h|
        SSHAgent.runAt(cmd, h)
      }
    end
    def SSHAgent.helperHosts()
      CLConfig.getDefaultConfig().hosts[1..-1]
    end
    # All reachable helper hosts; aborts the process when the primary host
    # is unreachable (taken to mean ssh-agent is not set up).
    def SSHAgent.liveHosts()
      unless SSHAgent.isRunning?
        puts "Sorry, you forgot to start ssh-agent or run ssh-add"
        exit(1)
      end
      alive = [ ]
      SSHAgent.helperHosts.each { |h| alive << h if SSHAgent.checkHost(h) }
      alive
    end
    # NOTE(review): despite the name, this writes EVERY configured host to
    # the file, not only hosts that passed checkHost — confirm intent.
    def SSHAgent.writeGoodHostFile() # returns filename of new hosts file
      cfg = CLConfig.getDefaultConfig
      fname = RunEnv.getTemporaryFilename()
      f = File.open(fname, "wb")
      cfg.hosts.each { |h|
        f.write("#{h}\n")
      }
      f.close
      fname
    end
  end

end
|
@@ -0,0 +1,39 @@
|
|
1
|
+
|
2
|
+
require 'CompLearnLib/TaskMaster'
|
3
|
+
|
4
|
+
# Task abstractions shared by the single-process and MPI TaskMasters.
module MTask

  # Base class for a unit of distributable work.  Subclasses override
  # #execute and send their result back via #reply.
  class Task

    # tid      - task id assigned by TaskMaster.enqueue
    # sentTime - when the task was last handed to a worker
    attr_reader :tid, :sentTime

    # Stamp the task with the current wall-clock time just before it is
    # shipped to a worker.
    # Fixed: the original assigned `time`, an undefined method that raised
    # NameError; the intent (per the attribute name) is Time.now.
    def aboutToSend()
      @sentTime = Time.now
    end

    # Assign the task id (called by TaskMaster.enqueue).
    def setTid(ntid)
      @tid = ntid
    end

    # Send this task's result back through the active TaskMaster.
    def reply(result)
      TaskMaster.reply(self,result)
    end

    # Look up a value previously replicated via TaskMaster.storeEverywhere.
    def fetch(key)
      TaskMaster.fetch(key)
    end

    # Default behavior: reply with nil; subclasses do real work here.
    def execute()
      reply(nil)
    end

  end

  # Message that replicates a key/value pair into a worker's storage.
  class Store
    attr_reader :key, :val
    def initialize(ukey, uval)
      @key,@val = ukey,uval
    end
  end

end
|
@@ -0,0 +1,112 @@
|
|
1
|
+
|
2
|
+
# Bail out early when this file is loaded by a plain ruby interpreter:
# the MPI constant is only provided by the mpi_ruby interpreter.
unless defined?(MPI)
  puts "Sorry, you seem to have started an MPIRuby script using"
  puts "the wrong ruby interpretter. Please try again something"
  puts "like ruby parbegin.rb yourScriptName.rb"
  exit 1
end
|
8
|
+
|
9
|
+
# Sentinel message: receiving one in TaskMaster.doSlaveLoop makes the
# slave process exit cleanly.
class ExitMsg
end
|
11
|
+
|
12
|
+
# MPI-backed master/slave task distributor (mpi_ruby bindings).
# Rank 0 is the master: it hands Task objects to idle slave ranks and
# dispatches replies to per-task callbacks.  Every other rank sits in
# doSlaveLoop executing whatever arrives.  Message tag 0 carries all
# traffic.
class TaskMaster

  @@haveInitted = false

  # Master-side (rank 0) bookkeeping; all class-instance variables.
  @cbs = { }       # tid => callback to run when the reply arrives
  @free = [ ]      # ranks currently idle
  @slaves = [ ]    # every slave rank (1 .. size-1)
  @tasks = { }     # tid => task currently in flight
  @maxTid = 0      # last task id handed out
  @storage = { }   # key/value data replicated via Store messages

  # Look up a value previously distributed with storeEverywhere.
  def TaskMaster.fetch(key)
    @storage[key]
  end

  # One-time master setup: registers an at_exit hook that tells every
  # slave to exit, then marks all non-zero ranks as free.
  def TaskMaster.doMasterInit()
    return if @@haveInitted
    @@haveInitted = true
    sz = MPI::Comm::WORLD.size()
    puts "Starting calculation with #{sz} nodes"
    Kernel.at_exit() {
      exitmsg = ExitMsg.new
      @slaves.each { |n|
        MPI::Comm::WORLD.send(exitmsg, n, 0)
      }
    }
    sz.times { |i| @free << i unless i == 0 }
    @slaves = @free.clone
  end

  # Slave main loop: lower our scheduling priority, then block forever on
  # messages from rank 0.  ExitMsg ends the process, Task objects are
  # executed (they reply for themselves), Store messages update local
  # @storage.
  def TaskMaster.doSlaveLoop()
    Process.setpriority(Process::PRIO_PROCESS, Process.pid, 19)
    while true
      task,status = MPI::Comm::WORLD.recv(0, 0)
      if task.is_a?(ExitMsg)
        exit(0)
      end
      if task.is_a?(Task)
        task.execute()
      end
      if task.is_a?(Store)
        @storage[task.key] = task.val
      end
    end
  end

  # Entry point for every rank; only rank 0 ever returns.
  def TaskMaster.init()
    if (MPI::Comm::WORLD.rank() == 0)
      TaskMaster.doMasterInit()
    else
      TaskMaster.doSlaveLoop()
    end
  end

  # Pop an idle slave rank, blocking on a reply first if none is free.
  def TaskMaster.getFreeSlave()
    waitForSlave() if @free.size == 0
    nextSlave = @free.shift
    nextSlave
  end

  def TaskMaster.waitForReply()
    TaskMaster.waitForSlave()
  end

  # Called on a slave: send a task's result back to the master.
  def TaskMaster.reply(task, result)
    MPI::Comm::WORLD.send([task.tid, result], 0, 0)
  end

  # Master-side: run (and clear) the callback for a finished task, then
  # return its slave rank to the free pool.
  def TaskMaster.handleReply(reply, status)
    tid,obj = reply
    if @cbs[tid]
      @cbs[tid].call(obj, @tasks[tid], status.source)
      @cbs[tid] = nil
    end
    @tasks[tid] = nil
    @free << status.source
  end

  # Block until any slave replies, then process that reply.
  def TaskMaster.waitForSlave()
    reply, status = MPI::Comm::WORLD.recv(MPI::Comm::ANY_SOURCE, 0)
    TaskMaster.handleReply(reply,status)
  end

  # Replicate a key/value pair into every slave's local storage.
  def TaskMaster.storeEverywhere(key, val)
    @slaves.each { |n|
      MPI::Comm::WORLD.send(Store.new(key,val), n, 0)
    }
  end

  # Assign a tid to task t, remember its callback, and ship it to the
  # next free slave.  (Removed a dead `m = Marshal.dump(t)` statement:
  # the result was never used; the MPI send serializes the task itself.)
  def TaskMaster.enqueue(t, &cb)
    @maxTid += 1
    t.setTid(@maxTid)
    @cbs[t.tid] = cb
    @tasks[t.tid] = t
    nextFreeSlave = TaskMaster.getFreeSlave
    MPI::Comm::WORLD.send(t, nextFreeSlave, 0)
  end

end
|
112
|
+
|
@@ -0,0 +1,39 @@
|
|
1
|
+
|
2
|
+
# Single-process TaskMaster: the drop-in, non-MPI counterpart of the
# MPI implementation.  Tasks run inline in #enqueue; "replies" are just
# the most recent value handed to #reply, and storeEverywhere keeps one
# local copy.  The blocking/handshake methods are deliberate no-ops so
# callers written against the MPI interface work unchanged.
class TaskMaster

  # Class-instance state (not shared with subclasses).
  @lastReply = nil
  @storage = { }

  class << self
    # Record the result of the task that just executed.
    def reply(t, reply)
      @lastReply = reply
    end

    # Keep a defensive copy of key and value in local storage.
    def storeEverywhere(key, val)
      @storage[key.clone] = val.clone
    end

    # Retrieve a value previously saved with storeEverywhere.
    def fetch(key)
      @storage[key]
    end

    # No master process to set up in single mode.
    def doMasterInit()
    end

    # Nothing to initialize in single mode.
    def init()
    end

    # Kept for interface parity; delegates to the no-op waitForSlave.
    def waitForReply()
      waitForSlave()
    end

    # No slaves exist, so there is nothing to wait for.
    def waitForSlave()
    end

    # Replies are handled synchronously inside enqueue; nothing to do.
    def handleReply(reply, status)
    end

    # Execute the task right here, then invoke its callback with the
    # captured reply, the task itself, and a fake source rank of 0.
    def enqueue(t, &cb)
      t.execute()
      cb.call(@lastReply, t, 0)
    end
  end

end
|
39
|
+
|
@@ -0,0 +1,300 @@
|
|
1
|
+
require 'CompLearnLib/Task'
|
2
|
+
require 'CompLearnLib/RunEnv.rb'
|
3
|
+
require 'CompLearnLib/CLConfig.rb'
|
4
|
+
include MRunEnv
|
5
|
+
include MTask
|
6
|
+
|
7
|
+
module MTree
|
8
|
+
|
9
|
+
# Distributable hill-climbing work unit: starting from @tree, evaluate
# @tries randomly mutated candidates and report the best-scoring one.
class TreeTask < Task
  # tree    - starting Tree to mutate
  # tries   - number of mutated candidates to evaluate
  # penalty - penalty parameter forwarded to the TreeScore scorer
  def initialize(tree, tries, penalty)
    @tree, @tries, @penalty = tree, tries, penalty
  end
  # Runs on a worker.  'dm' is fetched from TaskMaster storage
  # (presumably the distance matrix replicated via storeEverywhere —
  # confirm against the caller) and seeds a TreeScore (C extension).
  # The loop keeps the candidate with the MAXIMUM score and sends
  # [bestTree, bestScore] back via reply.
  def execute()
    dm = fetch('dm')
    ts = TreeScore.makeFullList(dm)
    ts.penalty = @penalty
    best, bestscore = nil, nil
    @tries.times {
      maybe = @tree.clone
      maybe.mutateComplex
      score = ts.score(maybe)
      if (best == nil || bestscore < score)
        best = maybe
        bestscore = score
      end
    }
    reply([best, bestscore])
  end
end
|
30
|
+
|
31
|
+
# Undirected, unrooted tree stored as adjacency lists: @edges[i] is the
# sorted array of node i's neighbours (nil when i has none).  For n
# species the tree has 2n-2 nodes: species (leaf, degree-1) nodes
# 0...n followed by n-2 kernel (internal, degree-3) nodes.
class Tree
  # consists of 2n - 2 nodes: n species followed by n-2 kernel nodes
  attr_reader :edges
  def initialize()
    @edges = [ ]
    @spm = nil   # shortest-path-matrix cache; invalidated on every edit
  end
  # Deep copy: new Tree sharing no edge arrays with the original.
  def clone()
    t = Tree.new
    t.copyEdges(self)
    t
  end
  # Lexicographic comparison of adjacency structures; 0 means identical.
  def <=>(other)
    res = @edges.size - other.edges.size
    return res unless res == 0
    @edges.each_index { |i|
      res = @edges[i].size - other.edges[i].size
      return res unless res == 0
      @edges[i].each_index { |j|
        res = @edges[i][j] - other.edges[i][j]
        return res unless res == 0
      }
    }
    0
  end
  # Replace our adjacency lists with copies of t's and drop the cache.
  def copyEdges(t)
    @edges = [ ]
    t.edges.each_index { |i|
      @edges[i] = t.edges[i].clone
    }
    @spm = nil
  end
  # Display name for node i: the species label (file extension stripped)
  # for leaves, or a prefixed kernel index for internal nodes.
  def makeName(i, names)
    if (i < names.size)
      return names[i].gsub(/[.].*/,'')
    else
      return CLConfig.getDefaultConfig.internalNodePrefix() + (i - names.size).to_s
    end
  end
  # Render the tree as a Graphviz "graph" document.
  def toDotString(names, title, desc)
    result = "/* #{desc} */\ngraph #{title} {\n"
    @edges.each_index { |i|
      @edges[i].each { |j|
        result += "#{makeName(i,names)} -- #{makeName(j,names)}\n"
      }
    }
    result += "}\n"
    result
  end
  # Debug dump: one "i->neighbours" line per node.
  def to_s()
    result = ""
    @edges.each_index { |i|
      result += "#{i}->"+@edges[i].join(",")+"\n"
    }
    result
  end
  # Build a random valid tree for the given number of species: first wire
  # the kernel nodes into a random internal tree, then attach each
  # species to a kernel node with spare capacity.
  def Tree.randomTree(species)
    raise "Not enough species" unless (species >= 3)
    result = Tree.new
    k = species - 2
    nodes = species+k
    (k-1).times { |i|
      n = nil
      begin
        n = RunEnv.zrand(i+1) + species
      end until result.getNeighborCount(n) <= 2
      result.connect(i+species+1,n)
    }
    species.times { |i|
      n = nil
      begin
        n = RunEnv.zrand(k) + species
      end until result.getNeighborCount(n) <= 2
      result.connect(i,n)
    }
    result
  end
  # Fill @spm[n]: for every reachable node m, @spm[n][m] is m's neighbour
  # on the (unique) path toward n.  Plain breadth-first search.
  # Fixed: the original iterated a Hash with each_key while inserting new
  # keys into it, which raises RuntimeError on Ruby 1.9+; an explicit
  # frontier queue is used instead (identical parent pointers on a tree,
  # where the path between any two nodes is unique).
  def calculateSPMFor(n)
    @spm[n] = [ ]
    curspm = @spm[n]
    curspm[n] = n
    visited = { n => true }
    frontier = [n]
    until frontier.empty?
      z = frontier.shift
      getNeighbors(z).each { |m|
        unless visited[m]
          visited[m] = true
          curspm[m] = z
          frontier << m
        end
      }
    end
  end
  # Rebuild the whole shortest-path matrix (one BFS per node).
  def calculateSPM()
    @spm = [ ]
    @edges.each_index { |n| calculateSPMFor(n) }
  end
  # Node sequence from a to b inclusive, following cached parent pointers.
  def findPath(a,b)
    calculateSPM() unless @spm
    path = [ ]
    curspm = @spm[b]
    cur = a
    while (cur != b)
      path << cur
      cur = curspm[cur]
    end
    path << cur
    path
  end
  def getNodeCount()
    @edges.size
  end
  # Inverse of "2n-2 nodes for n species".
  def getSpeciesCount()
    (getNodeCount()+2) / 2
  end
  def getKernelCount()
    getSpeciesCount() - 2
  end
  # Species (leaf) nodes have exactly one neighbour.
  def isSpecies?(node)
    getNeighborCount(node) == 1
  end
  # Kernel (internal) nodes have exactly three neighbours.
  def isKernel?(node)
    getNeighborCount(node) == 3
  end
  def getNeighbors(node)
    @edges[node]
  end
  def randomNeighbor(node)
    @edges[node][RunEnv.zrand(@edges[node].size)]
  end
  def getNeighborCount(node)
    return 0 unless @edges[node]
    @edges[node].size
  end
  def connected?(a,b)
    @edges[a] && @edges[a].include?(b)
  end
  # Remove the undirected edge a-b; isolated nodes revert to nil entries.
  def disconnect(a,b)
    @edges[a].delete(b)
    @edges[b].delete(a)
    @edges[a] = nil if @edges[a].size == 0
    @edges[b] = nil if @edges[b].size == 0
    @spm = nil
  end
  # Add the undirected edge a-b, keeping adjacency lists sorted.
  def connect(a,b)
    @edges[a] = [ ] unless @edges[a]
    @edges[b] = [ ] unless @edges[b]
    @edges[a] << b
    @edges[b] << a
    @edges[a].sort!
    @edges[b].sort!
    @spm = nil
  end
  # Rejection-sample a uniformly random species node.
  def randomSpeciesNode()
    n = nil
    begin
      n = randomNode()
    end until isSpecies?(n)
    n
  end
  # count distinct random species nodes.
  def randomSpeciesNodes(count)
    raise "Not enough species nodes" unless getSpeciesCount() >= count
    res = { }
    begin
      res[randomSpeciesNode()] = true
    end until res.size == count
    res.keys
  end
  def randomNode()
    RunEnv.zrand(getNodeCount())
  end
  # count distinct random nodes of any kind.
  def randomNodes(count)
    raise "Not enough nodes" unless getNodeCount() >= count
    res = { }
    begin
      res[randomNode()] = true
    end until res.size == count
    res.keys
  end
  # count distinct random kernel nodes.
  def randomKernelNodes(count)
    raise "Not enough kernel nodes" unless getKernelCount() >= count
    res = { }
    begin
      res[randomKernelNode()] = true
    end until res.size == count
    res.keys
  end
  # Mutation: swap the attachment points of two leaves that hang off
  # different kernel nodes.
  def mutateSpecies()
    begin
      a,b = randomSpeciesNodes(2)
      na,nb = getNeighbors(a)[0],getNeighbors(b)[0]
    end until na != nb
    disconnect(a,na)
    disconnect(b,nb)
    connect(a,nb)
    connect(b,na)
  end

  # Mutation: pick two kernel nodes far enough apart (path longer than 3)
  # and exchange the subtrees hanging toward each other.
  def mutateSubtreeInterchange()
    p = nil
    begin
      a,b = randomKernelNodes(2)
      p = findPath(a,b)
    end until (p.size > 3)
    na,nb = p[1],p[-2]
    disconnect(a,na)
    disconnect(b,nb)
    connect(a,nb)
    connect(b,na)
  end

  # Mutation: detach the subtree rooted next to k1 and re-graft it near
  # kernel node k2, rewiring the freed intermediate node i1.
  def mutateSubtreeTransfer()
    begin
      k1, k2 = randomNode, randomKernelNode()
      p = findPath(k1,k2)
    end until p.size > 2
    i1 = p[1]
    disconnect(k1,i1)
    ms = getNeighbors(i1)
    m1,m2,m3 = ms[0],ms[1],nil
    begin
      m3 = randomNeighbor(k2)
    end until m3 != p[-2]
    [[m1,i1], [m2,i1], [m3,k2]].each { |a| disconnect(a[0],a[1]) }
    [[m1,m2], [k2,i1], [m3,i1], [k1,i1]].each { |a| connect(a[0],a[1]) }
  end

  # Rejection-sample a uniformly random kernel node.
  def randomKernelNode()
    n = nil
    begin
      n = randomNode()
    end until isKernel?(n)
    n
  end

  # Apply one mutation chosen uniformly from the three kinds; subtree
  # interchange is skipped on small trees (<= 10 nodes).
  def mutateRandom()
    begin
      c = RunEnv.zrand(3)
    end while c == 1 && getNodeCount <= 10
    case c
    when 0 then mutateSpecies
    when 1 then mutateSubtreeInterchange
    when 2 then mutateSubtreeTransfer
    end
  end

  # Apply a geometrically-distributed number of random mutations, and
  # keep going until the tree actually differs from the original.
  # (RunEnv.zrand(0) is rand(0), a Float in [0,1).)
  def mutateComplex()
    orig = clone()
    begin
      mutateRandom
    end while RunEnv.zrand(0) < 0.5 || (orig <=> self) == 0
  end

  # Structural sanity check: every node has degree 1 or 3, and all the
  # degree-1 (species) nodes precede the degree-3 (kernel) nodes.
  def verifyTree()
    oldNeighborCount = 0
    @edges.each_index { |i|
      s = @edges[i].size
      return false if s != 1 && s != 3
      oldNeighborCount = s if (s > oldNeighborCount)
      return false if s < oldNeighborCount
    }
    return true
  end
  private :calculateSPM, :calculateSPMFor
end
|
299
|
+
|
300
|
+
end
|