miga-base 0.2.0.6 → 0.2.0.7
This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
- checksums.yaml +4 -4
- data/Gemfile +3 -0
- data/LICENSE +201 -0
- data/README.md +17 -335
- data/Rakefile +31 -0
- data/actions/add_result +2 -5
- data/actions/add_taxonomy +4 -7
- data/actions/create_dataset +5 -6
- data/actions/create_project +2 -5
- data/actions/daemon +2 -5
- data/actions/download_dataset +88 -58
- data/actions/find_datasets +36 -38
- data/actions/import_datasets +2 -5
- data/actions/index_taxonomy +2 -5
- data/actions/list_datasets +47 -49
- data/actions/list_files +7 -11
- data/actions/unlink_dataset +2 -5
- data/bin/miga +1 -1
- data/lib/miga/common.rb +132 -0
- data/lib/miga/daemon.rb +229 -168
- data/lib/miga/dataset.rb +354 -277
- data/lib/miga/gui.rb +346 -269
- data/lib/miga/metadata.rb +115 -71
- data/lib/miga/project.rb +361 -259
- data/lib/miga/remote_dataset.rb +200 -148
- data/lib/miga/result.rb +150 -99
- data/lib/miga/tax_index.rb +124 -67
- data/lib/miga/taxonomy.rb +129 -100
- data/lib/miga/version.rb +57 -0
- data/lib/miga.rb +2 -77
- data/scripts/_distances_noref_nomulti.bash +2 -0
- data/scripts/_distances_ref_nomulti.bash +2 -0
- data/scripts/aai_distances.bash +1 -0
- data/scripts/ani_distances.bash +1 -0
- data/scripts/assembly.bash +1 -0
- data/scripts/cds.bash +1 -0
- data/scripts/clade_finding.bash +17 -1
- data/scripts/distances.bash +1 -0
- data/scripts/essential_genes.bash +1 -0
- data/scripts/haai_distances.bash +1 -0
- data/scripts/init.bash +2 -0
- data/scripts/mytaxa.bash +1 -0
- data/scripts/mytaxa_scan.bash +1 -0
- data/scripts/ogs.bash +1 -0
- data/scripts/read_quality.bash +1 -0
- data/scripts/ssu.bash +1 -0
- data/scripts/subclades.bash +1 -0
- data/scripts/trimmed_fasta.bash +1 -0
- data/scripts/trimmed_reads.bash +1 -0
- data/test/common_test.rb +82 -0
- data/test/daemon_test.rb +53 -0
- data/test/dataset_test.rb +156 -0
- data/test/jruby_gui_test.rb +20 -0
- data/test/metadata_test.rb +48 -0
- data/test/project_test.rb +54 -0
- data/test/remote_dataset_test.rb +41 -0
- data/test/tax_index_test.rb +44 -0
- data/test/taxonomy_test.rb +36 -0
- data/test/test_helper.rb +32 -0
- metadata +53 -38
data/lib/miga/remote_dataset.rb
CHANGED
@@ -1,154 +1,206 @@
-#
 # @package MiGA
-# @
-# @license artistic license 2.0
-# @update Dec-07-2015
-#
+# @license Artistic-2.0

 require "restclient"
+require "open-uri"

-  }
-        raise "I don't know how to extract ncbi_taxids from #{universe}."
-      end
-  end
-  def get_ncbi_taxonomy
-    lineage = {}
-    tax_id = get_ncbi_taxid
-    loop do
-      break if tax_id.nil? or %w{0 1}.include? tax_id
-      doc = RemoteDataset.download(:ebi, :taxonomy, tax_id, "")
-      name = (doc.scan(/SCIENTIFIC NAME\s+:\s+(.+)/).first||[]).first
-      rank = (doc.scan(/RANK\s+:\s+(.+)/).first||[]).first
-      rank = "dataset" if lineage.empty? and rank=="no rank"
-      lineage[rank] = name unless rank.nil?
-      tax_id = (doc.scan(/PARENT ID\s+:\s+(.+)/).first||[]).first
-    end
-    Taxonomy.new(lineage)
+##
+# MiGA representation of datasets with data in remote locations.
+class MiGA::RemoteDataset < MiGA::MiGA
+  # Class-level
+
+  ##
+  # Structure of the different database Universes or containers. The structure
+  # is a Hash with universe names as keys as Symbol and values being a Hash with
+  # supported keys as Symbol:
+  # - +:dbs+ => Hash with keys being the database name and the values a Hash of
+  #   properties such as +stage+, +format+, and +map_to+.
+  # - +url+ => Pattern of the URL where the data can be obtained, where +%1$s+
+  #   is the name of the database, +%2$s+ is the IDs, and +%3$s+ is format.
+  # - +method+ => Method used to query the URL. Only +:rest+ is currently
+  #   supported.
+  # - +map_to_universe+ => Universe where results map to. Currently unsupported.
+  def self.UNIVERSE ; @@UNIVERSE ; end
+  @@UNIVERSE = {
+    web:{
+      dbs: {
+        assembly:{stage: :assembly, format: :fasta},
+        assembly_gz:{stage: :assembly, format: :fasta_gz}
+      },
+      url: "%2$s",
+      method: :net
+    },
+    ebi:{
+      dbs: { embl:{stage: :assembly, format: :fasta} },
+      url: "http://www.ebi.ac.uk/Tools/dbfetch/dbfetch/%1$s/%2$s/%3$s",
+      method: :rest
+    },
+    ncbi:{
+      dbs: { nuccore:{stage: :assembly, format: :fasta} },
+      url: "http://eutils.ncbi.nlm.nih.gov/entrez/eutils/" +
+        "efetch.fcgi?db=%1$s&id=%2$s&rettype=%3$s&retmode=text",
+      method: :rest
+    },
+    ncbi_map:{
+      dbs: { assembly:{map_to: :nuccore, format: :text} },
+      url: "http://eutils.ncbi.nlm.nih.gov/entrez/eutils/" +
+        # FIXME ncbi_map is intended to do internal NCBI mapping between
+        # databases.
+        "elink.fcgi?dbfrom=%1$s&id=%2$s&db=%3$s",
+      method: :rest,
+      map_to_universe: :ncbi
+    }
+  }
+
+  ##
+  # Download data from the +universe+ in the database +db+ with IDs +ids+ and
+  # in +format+. If passed, it saves the result in +file+. Returns String.
+  def self.download(universe, db, ids, format, file=nil)
+    ids = [ids] unless ids.is_a? Array
+    case @@UNIVERSE[universe][:method]
+    when :rest
+      map_to = @@UNIVERSE[universe][:dbs][db].nil? ? nil :
+        @@UNIVERSE[universe][:dbs][db][:map_to]
+      url = sprintf @@UNIVERSE[universe][:url],
+        db, ids.join(","), format, map_to
+      response = RestClient::Request.execute(:method=>:get, :url=>url,
+        :timeout=>600)
+      raise "Unable to reach #{universe} client, error code "+
+        "#{response.code}." unless response.code == 200
+      doc = response.to_s
+    when :net
+      url = sprintf @@UNIVERSE[universe][:url],
+        db, ids.join(","), format, map_to
+      doc = ""
+      open(url) { |f| doc = f.read }
+    end
+    unless file.nil?
+      ofh = File.open(file, "w")
+      ofh.print doc
+      ofh.close
+    end
+    doc
+  end
+
+  # Instance-level
+
+  ##
+  # Universe of the dataset.
+  attr_reader :universe
+  # Database storing the dataset.
+  attr_reader :db
+  # IDs of the entries composing the dataset.
+  attr_reader :ids
+
+  ##
+  # Initialize MiGA::RemoteDataset with +ids+ in database +db+ from +universe+.
+  def initialize(ids, db, universe)
+    ids = [ids] unless ids.is_a? Array
+    @ids = (ids.is_a?(Array) ? ids : [ids])
+    @db = db.to_sym
+    @universe = universe.to_sym
+    raise "Unknown Universe: #{@universe}. Try one of: "+
+      "#{@@UNIVERSE.keys}" unless @@UNIVERSE.keys.include? @universe
+    raise "Unknown Database: #{@db}. Try one of: "+
+      "#{@@UNIVERSE[@universe][:dbs]}" unless
+      @@UNIVERSE[@universe][:dbs].include? @db
+    # FIXME Part of the +map_to+ support:
+    #unless @@UNIVERSE[@universe][:dbs][@db][:map_to].nil?
+    #  MiGA::RemoteDataset.download
+    #end
+  end
+
+  ##
+  # Save dataset to the MiGA::Project +project+ identified with +name+. +is_ref+
+  # indicates if it should be a reference dataset, and contains +metadata+.
+  def save_to(project, name=nil, is_ref=true, metadata={})
+    name = ids.join("_").miga_name if name.nil?
+    project = MiGA::Project.new(project) if project.is_a? String
+    raise "Dataset #{name} exists in the project, aborting..." if
+      MiGA::Dataset.exist?(project, name)
+    metadata = get_metadata(metadata)
+    case @@UNIVERSE[universe][:dbs][db][:stage]
+    when :assembly
+      base = project.path + "/data/" + MiGA::Dataset.RESULT_DIRS[:assembly] +
+        "/" + name
+      File.open("#{base}.start", "w") { |ofh| ofh.puts Time.now.to_s }
+      if @@UNIVERSE[universe][:dbs][db][:format] == :fasta_gz
+        download("#{base}.LargeContigs.fna.gz")
+        system("gzip -d #{base}.LargeContigs.fna.gz")
+      else
+        download("#{base}.LargeContigs.fna")
       end
+      File.symlink("#{base}.LargeContigs.fna", "#{base}.AllContigs.fna")
+      File.open("#{base}.done", "w") { |ofh| ofh.puts Time.now.to_s }
+    else
+      raise "Unexpected error: Unsupported result for database #{db}."
+    end
+    dataset = MiGA::Dataset.new(project, name, is_ref, metadata)
+    project.add_dataset(dataset.name)
+    result = dataset.add_result @@UNIVERSE[universe][:dbs][db][:stage]
+    raise "Empty dataset created: seed result was not added due to "+
+      "incomplete files." if result.nil?
+    dataset
+  end
+
+  ##
+  # Get metadata from the remote location.
+  def get_metadata(metadata={})
+    case universe
+    when :ebi, :ncbi
+      # Get taxonomy
+      metadata[:tax] = get_ncbi_taxonomy
+    end
+    metadata
+  end
+
+  ##
+  # Download data into +file+.
+  def download(file)
+    MiGA::RemoteDataset.download(universe, db, ids,
+      @@UNIVERSE[universe][:dbs][db][:format], file)
+  end

+  ##
+  # Get NCBI Taxonomy ID.
+  def get_ncbi_taxid
+    send("get_ncbi_taxid_from_#{universe}")
+  end
+
+  ##
+  # Get NCBI taxonomy as MiGA::Taxonomy.
+  def get_ncbi_taxonomy
+    lineage = {}
+    tax_id = get_ncbi_taxid
+    while !(tax_id.nil? or %w{0 1}.include? tax_id)
+      doc = MiGA::RemoteDataset.download(:ebi, :taxonomy, tax_id, "")
+      name = doc.scan(/SCIENTIFIC NAME\s+:\s+(.+)/).first.to_a.first
+      rank = doc.scan(/RANK\s+:\s+(.+)/).first.to_a.first
+      rank = "dataset" if lineage.empty? and rank=="no rank"
+      lineage[rank] = name unless rank.nil?
+      tax_id = doc.scan(/PARENT ID\s+:\s+(.+)/).first.to_a.first
+    end
+    MiGA::Taxonomy.new(lineage)
+  end
+
+  private
+
+  def get_ncbi_taxid_from_ncbi
+    doc = MiGA::RemoteDataset.download(universe, db, ids, :gb).split(/\n/)
+    ln = doc.grep(/^\s+\/db_xref="taxon:/).first
+    return nil if ln.nil?
+    ln.sub!(/.*(?:"taxon:)(\d+)["; ].*/, "\\1")
+    return nil unless ln =~ /^\d+$/
+    ln
+  end
+
+  def get_ncbi_taxid_from_ebi
+    doc = MiGA::RemoteDataset.download(universe, db, ids, :annot).split(/\n/)
+    ln = doc.grep(/^FT\s+\/db_xref="taxon:/).first
+    ln = doc.grep(/^OX\s+NCBI_TaxID=/).first if ln.nil?
+    return nil if ln.nil?
+    ln.sub!(/.*(?:"taxon:|NCBI_TaxID=)(\d+)["; ].*/, "\\1")
+    return nil unless ln =~ /^\d+$/
+    ln
+  end
+end
data/lib/miga/result.rb
CHANGED
@@ -1,102 +1,153 @@
-#
 # @package MiGA
-# @
-#
+# @license Artistic-2.0
+
+##
+# The result from a task run. It can be project-wide or dataset-specific.
+class MiGA::Result < MiGA::MiGA
+
+  # Class-level
+
+  ##
+  # Check if the result described by the JSON in +path+ already exists.
+  def self.exist?(path)
+    !!(File.size? path)
+  end
+
+  ##
+  # Load the result described by the JSON in +path+. Returns MiGA::Result if it
+  # already exists, nil otherwise.
+  def self.load(path)
+    return nil unless MiGA::Result.exist? path
+    MiGA::Result.new(path)
+  end
+
+  # Instance-level
+
+  ##
+  # Path to the JSON file describing the result.
+  attr_reader :path
+
+  ##
+  # Hash with the result metadata.
+  attr_reader :data
+
+  ##
+  # Array of MiGA::Result objects nested within the result (if any).
+  attr_reader :results
+
+  ##
+  # Load or create the MiGA::Result described by the JSON file +path+.
+  def initialize(path)
+    @path = path
+    MiGA::Result.exist?(path) ? self.load : create
+  end
+
+  ##
+  # Directory containing the result.
+  def dir
+    File.dirname(path)
+  end
+
+  ##
+  # Absolute path to the file(s) defined by symbol +k+.
+  def file_path(k)
+    k = k.to_sym
+    f = self[:files].nil? ? nil : self[:files][k]
+    return nil if f.nil?
+    return File.expand_path(f, dir) unless f.is_a? Array
+    f.map{ |fi| File.expand_path(fi, dir) }
+  end
+
+  ##
+  # Entry with symbol +k+.
+  def [](k) data[k.to_sym] ; end
+
+  ##
+  # Register +file+ (path relative to #dir) with the symbol +k+. If the file
+  # doesn't exist but the .gz extension does, the gzipped file is registered
+  # instead. If neither exists, nothing is registered.
+  def add_file(k, file)
+    k = k.to_sym
+    @data[:files] ||= {}
+    @data[:files][k] = file if File.exist? File.expand_path(file, dir)
+    @data[:files][k] = file + ".gz" if
+      File.exist? File.expand_path(file + ".gz", dir)
+  end
+
+  ##
+  # #add_file for each key-value pair in the +files+ Hash.
+  def add_files(files)
+    files.each { |k, v| add_file(k, v) }
+  end
+
+  ##
+  # Initialize and #save empty result.
+  def create
+    @data = {:created=>Time.now.to_s, :results=>[], :stats=>{}, :files=>{}}
+    save
+  end
+
+  ##
+  # Save the result persistently (in the JSON file #path).
+  def save
+    @data[:updated] = Time.now.to_s
+    json = JSON.pretty_generate data
+    ofh = File.open(path, "w")
+    ofh.puts json
+    ofh.close
+    self.load
+  end
+
+  ##
+  # Load (or reload) result data in the JSON file #path.
+  def load
+    json = File.read(path)
+    @data = JSON.parse(json, {:symbolize_names=>true})
+    @data[:files] ||= {}
+    @results = self[:results].map{ |rs| MiGA::Result.new rs }
+  end
+
+  ##
+  # Remove result, including all associated files.
+  def remove!
+    each_file do |file|
+      f = File.expand_path(file, dir)
+      FileUtils.rm_rf(f) if File.exist? f
+    end
+    %w(.start .done).each do |ext|
+      f = path.sub(/\.json$/, ext)
+      File.unlink f if File.exist? f
+    end
+    File.unlink path
+  end
+
+  ##
+  # Iterate +blk+ for each registered file. If +blk+ calls for one argument, the
+  # relative path to the file is passed. If it calls for two arguments, the
+  # symbol describing the file is passed first and the path second. Note that
+  # multiple files may have the same symbol, since arrays of files are
+  # supported.
+  def each_file(&blk)
+    @data[:files] ||= {}
+    self[:files].each do |k,files|
+      files = [files] unless files.kind_of? Array
+      files.each do |file|
+        if blk.arity==1
+          blk.call(file)
+        elsif blk.arity==2
+          blk.call(k, file)
+        else
+          raise "Wrong number of arguments: #{blk.arity} for one or two"
+        end
       end
-end
+    end
+  end

+  ##
+  # Add the MiGA::Result +result+ as part of the current result.
+  def add_result(result)
+    @data[:results] << result.path
+    save
+  end
+
+end
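
Likewise, a minimal sketch of how the JSON-backed MiGA::Result API above fits together (the result path and file names are hypothetical placeholders):

    require "miga"

    # Create (or reload) a result descriptor; the JSON file is written on
    # creation (hypothetical path inside a project's data directory).
    res = MiGA::Result.new("data/02.trimmed_reads/ds1.json")

    # Register files relative to the result's directory; a file is recorded
    # only if it (or its .gz counterpart) exists on disk.
    res.add_files(pair1: "ds1.1.clipped.fastq", pair2: "ds1.2.clipped.fastq")
    res.save

    # Iterate over registered files with a one- or two-argument block.
    res.each_file { |key, rel_path| puts "#{key}: #{rel_path}" }
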