protk 1.3.1.pre3 → 1.4.0
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +4 -4
- data/README.md +21 -19
- data/bin/add_retention_times.rb +1 -1
- data/bin/interprophet.rb +16 -5
- data/bin/make_decoy.rb +1 -1
- data/bin/manage_db.rb +1 -1
- data/bin/mascot_search.rb +2 -2
- data/bin/mascot_to_pepxml.rb +1 -1
- data/bin/msgfplus_search.rb +26 -9
- data/bin/omssa_search.rb +1 -1
- data/bin/peptide_prophet.rb +57 -20
- data/bin/pepxml_to_table.rb +15 -2
- data/bin/protein_prophet.rb +41 -1
- data/bin/protk_setup.rb +2 -2
- data/bin/protxml_to_gff.rb +50 -42
- data/bin/protxml_to_psql.rb +1 -1
- data/bin/protxml_to_table.rb +16 -3
- data/bin/repair_run_summary.rb +1 -1
- data/bin/sixframe.rb +2 -2
- data/bin/swissprot_to_table.rb +1 -1
- data/bin/tandem_search.rb +1 -1
- data/bin/tandem_to_pepxml.rb +1 -1
- data/lib/protk/constants.rb +2 -1
- data/lib/protk/convert_util.rb +1 -1
- data/lib/protk/data/tandem-style.css +349 -0
- data/lib/protk/data/tandem-style.xsl +264 -0
- data/lib/protk/data/tandem_gpm_defaults.xml +3 -3
- data/lib/protk/data/tandem_isb_kscore_defaults.xml +2 -0
- data/lib/protk/data/tandem_isb_native_defaults.xml +3 -0
- data/lib/protk/data/tandem_params.xml +0 -8
- data/lib/protk/fastadb.rb +1 -1
- data/lib/protk/galaxy_stager.rb +14 -3
- data/lib/protk/galaxy_util.rb +39 -31
- data/lib/protk/gffdb.rb +6 -1
- data/lib/protk/manage_db_rakefile.rake +1 -1
- data/lib/protk/manage_db_tool.rb +1 -1
- data/lib/protk/pepxml.rb +159 -7
- data/lib/protk/plasmodb.rb +1 -1
- data/lib/protk/prophet_tool.rb +20 -52
- data/lib/protk/setup_rakefile.rake +18 -11
- data/lib/protk/tandem_search_tool.rb +20 -7
- data/lib/protk/tool.rb +1 -1
- data/lib/protk/uniprot_mapper.rb +1 -1
- metadata +10 -14
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
|
|
1
1
|
---
|
2
2
|
SHA1:
|
3
|
-
metadata.gz:
|
4
|
-
data.tar.gz:
|
3
|
+
metadata.gz: 22d2c990e46bf29f08cbf00dc2ecd9a759fae457
|
4
|
+
data.tar.gz: 09dd0159d8b564d9d297a987c74e13833fabbc24
|
5
5
|
SHA512:
|
6
|
-
metadata.gz:
|
7
|
-
data.tar.gz:
|
6
|
+
metadata.gz: 2b08c1086187da5755e4b0d98dcfebcff80893eb291e09291ea3853fa848ade7f464f83fc97a8fe362d2b9a96e915e2aa1945ddd6e0f39ada32d1a747a0c7d73
|
7
|
+
data.tar.gz: 767b474d9f0b890342f783bff0e8633434542cf151d34f239f9ed52dab43a1909b683fa285b25954af54856d68493ab67e4776d152830bc938e62a932c63e9b3
|
data/README.md
CHANGED
@@ -12,7 +12,7 @@ Protk is a suite of tools for proteomics. It aims to present a simple and consis
|
|
12
12
|
- Tandem MS search with X!Tandem, Mascot, OMSSA and MS-GF+
|
13
13
|
- Peptide and Protein inference with Peptide Prophet, iProphet and Protein Prophet
|
14
14
|
- Conversion of pepXML or protXML to tabular format
|
15
|
-
|
15
|
+
- Proteogenomics (mapping peptides to genomic coordinates)
|
16
16
|
|
17
17
|
## Installation
|
18
18
|
|
@@ -41,6 +41,7 @@ Protk consists of a suite of small ruby programs. After installing the protk ru
|
|
41
41
|
- `protxml_to_table.rb` Convert protXML to tabular format
|
42
42
|
- `make_decoy.rb` Generate semi-random decoy sequences
|
43
43
|
- `sixframe.rb` Generate six-frame translations of DNA sequences
|
44
|
+
- `protxml_to_gff.rb` Map peptides to genomic coordinates
|
44
45
|
- `protk_setup.rb` Install third party dependencies
|
45
46
|
- `manage_db.rb` Manage protein databases
|
46
47
|
|
@@ -52,7 +53,7 @@ Protk includes a setup tool to install various third party proteomics tools such
|
|
52
53
|
protk_setup.rb tpp omssa blast msgfplus pwiz
|
53
54
|
```
|
54
55
|
|
55
|
-
By default protk will install tools and databases into `.protk` in your home directory. If this is not desirable you can change the protk root default by setting the environment variable `PROTK_INSTALL_DIR`. If you prefer to install the tools yourself protk will find them provided they are included in your `$PATH`. Those executables will be used as a fallback if nothing is available under the `.protk` installation directory. A common source of errors when running the protk_setup script is missing dependencies. The setup script has been tested on ubuntu 12 with the following dependencies installed;
|
56
|
+
By default protk will install tools and databases into `.protk` in your home directory. If this is not desirable you can change the protk root default by setting the environment variable `PROTK_INSTALL_DIR`. If you prefer to install the tools yourself protk will find them, provided they are included in your `$PATH`. Those executables will be used as a fallback if nothing is available under the `.protk` installation directory. A common source of errors when running the protk_setup script is missing dependencies. The setup script has been tested on ubuntu 12 with the following dependencies installed;
|
56
57
|
|
57
58
|
```
|
58
59
|
apt-get install build-essential autoconf automake git-core mercurial subversion pkg-config libc6-dev curl libxml2-dev openjdk-6-jre libbz2-dev libgd2-noxpm-dev unzip
|
@@ -98,13 +99,13 @@ Many protk tools have equivalent galaxy wrappers available on the [galaxy toolsh
|
|
98
99
|
|
99
100
|
4. Install protk in an isolated gemset using rvm.
|
100
101
|
|
101
|
-
This sets up an isolated environment where only a specific version of protk is available. We name the environment according to the protk intermediate version number (1.
|
102
|
+
This sets up an isolated environment where only a specific version of protk is available. We name the environment according to the protk intermediate version number (1.4 in this example). Minor bugfixes will be released as 1.4.x and can be installed without updating the toolshed wrappers
|
102
103
|
|
103
104
|
```bash
|
104
105
|
rvm 2.1
|
105
|
-
rvm gemset create protk1.
|
106
|
-
rvm use 2.1@protk1.
|
107
|
-
gem install protk -v '~>1.
|
106
|
+
rvm gemset create protk1.4
|
107
|
+
rvm use 2.1@protk1.4
|
108
|
+
gem install protk -v '~>1.4'
|
108
109
|
```
|
109
110
|
|
110
111
|
5. Configure Galaxy's tool dependency directory.
|
@@ -118,32 +119,33 @@ Many protk tools have equivalent galaxy wrappers available on the [galaxy toolsh
|
|
118
119
|
|
119
120
|
6. Create a tool dependency that sets up protk in the environment created by rvm
|
120
121
|
|
121
|
-
In this example we create the environment for protk `1.
|
122
|
+
In this example we create the environment for protk `1.4` as this was the version installed in step 4 above.
|
122
123
|
|
123
124
|
```bash
|
124
125
|
cd <tool_dependency_dir>
|
125
126
|
mkdir protk
|
126
127
|
cd protk
|
127
|
-
mkdir 1.
|
128
|
-
ln -s 1.
|
129
|
-
|
130
|
-
|
128
|
+
mkdir 1.4
|
129
|
+
ln -s 1.4 default
|
130
|
+
rvm use 2.1@protk1.4
|
131
|
+
rvmenv=`rvm env --path 2.1@protk1.4`
|
132
|
+
echo ". $rvmenv" > 1.4/env.sh
|
131
133
|
```
|
132
134
|
|
133
135
|
7. Keep things up to date
|
134
136
|
|
135
|
-
When new versions of galaxy tools are released they may change the version of protk that is required. Check the release notes on the tool to see what is needed. For example, if upgrading to version 1.
|
137
|
+
When new versions of galaxy tools are released they may change the version of protk that is required. Check the release notes on the tool to see what is needed. For example, if upgrading to version 1.5 you would do the following;
|
136
138
|
|
137
139
|
```bash
|
138
140
|
rvm 2.1
|
139
|
-
rvm gemset create protk1.
|
140
|
-
rvm use 2.1@protk1.
|
141
|
-
gem install protk -v '~>1.
|
141
|
+
rvm gemset create protk1.5
|
142
|
+
rvm use 2.1@protk1.5
|
143
|
+
gem install protk -v '~>1.5'
|
142
144
|
cd <tool_dependency_dir>/protk/
|
143
|
-
mkdir 1.
|
144
|
-
rvmenv=`rvm env --path 2.1@protk1.
|
145
|
-
echo ". $rvmenv" > 1.
|
146
|
-
ln -s 1.
|
145
|
+
mkdir 1.5
|
146
|
+
rvmenv=`rvm env --path 2.1@protk1.5`
|
147
|
+
echo ". $rvmenv" > 1.5/env.sh
|
148
|
+
ln -s 1.5 default
|
147
149
|
```
|
148
150
|
|
149
151
|
|
data/bin/add_retention_times.rb
CHANGED
data/bin/interprophet.rb
CHANGED
@@ -19,6 +19,8 @@ for_galaxy = GalaxyUtil.for_galaxy?
|
|
19
19
|
prophet_tool=ProphetTool.new([
|
20
20
|
:explicit_output,
|
21
21
|
:over_write,
|
22
|
+
:probability_threshold,
|
23
|
+
:threads,
|
22
24
|
:prefix])
|
23
25
|
|
24
26
|
prophet_tool.option_parser.banner = "Run InterProphet on a set of pep.xml input files.\n\nUsage: interprophet.rb [options] file1.pep.xml file2.pep.xml ..."
|
@@ -30,13 +32,13 @@ prophet_tool.add_boolean_option(:no_nrs,false,['--no-nrs', 'Don\'t use NRS (Numb
|
|
30
32
|
prophet_tool.add_boolean_option(:no_nse,false,['--no-nse', 'Don\'t use NSE (Number of Sibling Experiments) in Model'])
|
31
33
|
prophet_tool.add_boolean_option(:no_nsi,false,["--no-nsi",'Don\'t use NSE (Number of Sibling Ions) in Model'])
|
32
34
|
prophet_tool.add_boolean_option(:no_nsm,false,["--no-nsm",'Don\'t use NSE (Number of Sibling Modifications) in Model'])
|
33
|
-
prophet_tool.add_value_option(:min_prob,"",["--minprob mp","Minimum probability cutoff "])
|
35
|
+
# prophet_tool.add_value_option(:min_prob,"",["--minprob mp","Minimum probability cutoff "])
|
34
36
|
|
35
37
|
exit unless prophet_tool.check_options(true)
|
36
38
|
|
37
39
|
|
38
40
|
# Obtain a global environment object
|
39
|
-
genv=Constants.
|
41
|
+
genv=Constants.instance
|
40
42
|
|
41
43
|
inputs = ARGV.collect {|file_name|
|
42
44
|
file_name.chomp
|
@@ -51,20 +53,23 @@ end
|
|
51
53
|
if ( !Pathname.new(output_file).exist? || prophet_tool.over_write )
|
52
54
|
|
53
55
|
cmd="InterProphetParser "
|
54
|
-
|
56
|
+
cmd<<"THREADS=#{prophet_tool.threads.to_i}" if prophet_tool.threads.to_i > 0
|
55
57
|
cmd<<"NONSS " if prophet_tool.options.no_nss
|
56
58
|
cmd<<"NONRS " if prophet_tool.options.no_nrs
|
57
59
|
cmd<<"NONSE " if prophet_tool.options.no_nse
|
58
60
|
cmd<<"NONSI " if prophet_tool.options.no_nsi
|
59
61
|
cmd<<"NONSM " if prophet_tool.options.no_nsm
|
60
62
|
|
61
|
-
|
63
|
+
|
64
|
+
cmd << " MINPROB=#{prophet_tool.probability_threshold}" if ( prophet_tool.probability_threshold !="" )
|
62
65
|
|
63
66
|
if for_galaxy
|
64
67
|
inputs = inputs.collect {|ip| GalaxyUtil.stage_pepxml(ip) }
|
65
68
|
end
|
66
69
|
|
67
|
-
|
70
|
+
input_files = inputs.collect { |e| e.staged_path }
|
71
|
+
|
72
|
+
cmd << " #{input_files.join(" ")} #{output_file}"
|
68
73
|
|
69
74
|
genv.log("Running #{cmd}",:info)
|
70
75
|
|
@@ -72,6 +77,12 @@ if ( !Pathname.new(output_file).exist? || prophet_tool.over_write )
|
|
72
77
|
#
|
73
78
|
code = prophet_tool.run(cmd,genv)
|
74
79
|
throw "Command failed with exit code #{code}" unless code==0
|
80
|
+
|
81
|
+
if for_galaxy
|
82
|
+
inputs.each do |ip_stager|
|
83
|
+
ip_stager.restore_references(output_file)
|
84
|
+
end
|
85
|
+
end
|
75
86
|
|
76
87
|
else
|
77
88
|
genv.log("Interprophet output file #{output_file} already exists. Run with -r option to replace",:warn)
|
data/bin/make_decoy.rb
CHANGED
data/bin/manage_db.rb
CHANGED
data/bin/mascot_search.rb
CHANGED
@@ -110,7 +110,7 @@ end
|
|
110
110
|
|
111
111
|
# Environment with global constants
|
112
112
|
#
|
113
|
-
$genv=Constants.
|
113
|
+
$genv=Constants.instance
|
114
114
|
|
115
115
|
# Setup specific command-line options for this tool. Other options are inherited from SearchTool
|
116
116
|
#
|
@@ -188,7 +188,7 @@ else
|
|
188
188
|
#site = RestClient::Resource.new(mascot_cgi, timeout=300)
|
189
189
|
#search_response=site['/nph-mascot.exe?1'].post , postdict, {:cookies=>cookie}
|
190
190
|
|
191
|
-
search_response=RestClient::Request.execute(:method => :post, :url => "#{mascot_cgi}/nph-mascot.exe?1", :payload => postdict,:headers=>{:cookies=>cookie},:timeout => search_tool.options.timeout, :open_timeout => 10)
|
191
|
+
search_response=RestClient::Request.execute(:method => :post, :url => "#{mascot_cgi}/nph-mascot.exe?1", :payload => postdict,:headers=>{:cookies=>cookie},:timeout => search_tool.options.timeout.to_i, :open_timeout => 10)
|
192
192
|
|
193
193
|
|
194
194
|
#search_response=RestClient.post "#{mascot_cgi}/nph-mascot.exe?1", postdict, {:cookies=>cookie}
|
data/bin/mascot_to_pepxml.rb
CHANGED
data/bin/msgfplus_search.rb
CHANGED
@@ -60,7 +60,7 @@ exit unless search_tool.check_options(true)
|
|
60
60
|
|
61
61
|
# Environment with global constants
|
62
62
|
#
|
63
|
-
genv=Constants.
|
63
|
+
genv=Constants.instance
|
64
64
|
|
65
65
|
# Set search engine specific parameters on the SearchTool object
|
66
66
|
#
|
@@ -81,17 +81,21 @@ db_info=search_tool.database_info
|
|
81
81
|
|
82
82
|
database_path=db_info.path
|
83
83
|
|
84
|
-
|
85
|
-
|
86
|
-
|
87
|
-
|
88
|
-
database_path=
|
89
|
-
|
84
|
+
database_stager=nil
|
85
|
+
|
86
|
+
if for_galaxy || Pathname.new(database_path).extname.to_s.downcase != ".fasta"
|
87
|
+
database_stager = GalaxyUtil.stage_fasta(database_path)
|
88
|
+
database_path = database_stager.staged_path
|
89
|
+
# # Database must have fasta extension
|
90
|
+
# if
|
91
|
+
# File.symlink(database_path,"#{database_path}.fasta") unless File.exists?("#{database_path}.fasta")
|
92
|
+
# # make_msgfdb_cmd << "ln -s #{database_path} #{database_path}.fasta;"
|
93
|
+
# database_path="#{database_path}.fasta"
|
90
94
|
end
|
91
95
|
|
92
96
|
# Database must be indexed
|
93
97
|
unless FileTest.exists?("#{database_path}.canno")
|
94
|
-
dbdir = Pathname.new(database_path).dirname.to_s
|
98
|
+
# dbdir = Pathname.new(database_path).dirname.to_s
|
95
99
|
tdavalue=search_tool.decoy_search ? 1 : 0;
|
96
100
|
make_msgfdb_cmd << "java -Xmx3500M -cp #{genv.msgfplusjar} edu.ucsd.msjava.msdbsearch.BuildSA -d #{database_path} -tda #{tdavalue}; "
|
97
101
|
end
|
@@ -210,8 +214,17 @@ ARGV.each do |filename|
|
|
210
214
|
cmd << ";ruby -pi.bak -e \"gsub('post=\\\"?','post=\\\"X')\" #{mzid_output_path}"
|
211
215
|
cmd << ";ruby -pi.bak -e \"gsub('pre=\\\"?','pre=\\\"X')\" #{mzid_output_path}"
|
212
216
|
cmd << ";idconvert #{mzid_output_path} --pepXML -o #{Pathname.new(mzid_output_path).dirname}"
|
217
|
+
|
218
|
+
|
219
|
+
pepxml_output_path = "#{mzid_output_path.chomp('.mzid')}.pepXML"
|
220
|
+
|
221
|
+
# Fix the msms_run_summary base_name attribute
|
222
|
+
#
|
223
|
+
if for_galaxy
|
224
|
+
cmd << ";ruby -pi.bak -e \"gsub(/ base_name=[^ ]+/,' base_name=\\\"#{original_input_file}\\\"')\" #{pepxml_output_path}"
|
225
|
+
end
|
213
226
|
#Then copy the pepxml to the final output path
|
214
|
-
cmd << "; mv #{
|
227
|
+
cmd << "; mv #{pepxml_output_path} #{output_path}"
|
215
228
|
else
|
216
229
|
cmd << "; mv #{mzid_output_path} #{output_path}"
|
217
230
|
end
|
@@ -228,6 +241,10 @@ ARGV.each do |filename|
|
|
228
241
|
input_stager.restore_references(output_path)
|
229
242
|
end
|
230
243
|
|
244
|
+
unless database_stager.nil?
|
245
|
+
database_stager.restore_references(output_path)
|
246
|
+
end
|
247
|
+
|
231
248
|
else
|
232
249
|
genv.log("Skipping search on existing file #{output_path}",:warn)
|
233
250
|
end
|
data/bin/omssa_search.rb
CHANGED
data/bin/peptide_prophet.rb
CHANGED
@@ -11,6 +11,7 @@ require 'protk/constants'
|
|
11
11
|
require 'protk/command_runner'
|
12
12
|
require 'protk/prophet_tool'
|
13
13
|
require 'protk/galaxy_util'
|
14
|
+
require 'protk/pepxml'
|
14
15
|
|
15
16
|
for_galaxy = GalaxyUtil.for_galaxy?
|
16
17
|
input_stager = nil
|
@@ -23,7 +24,9 @@ prophet_tool=ProphetTool.new([
|
|
23
24
|
:over_write,
|
24
25
|
:maldi,
|
25
26
|
:prefix,
|
26
|
-
:database
|
27
|
+
:database,
|
28
|
+
:threads,
|
29
|
+
:probability_threshold])
|
27
30
|
prophet_tool.option_parser.banner = "Run PeptideProphet on a set of pep.xml input files.\n\nUsage: peptide_prophet.rb [options] file1.pep.xml file2.pep.xml ..."
|
28
31
|
@output_suffix="_pproph"
|
29
32
|
prophet_tool.options.database=nil
|
@@ -49,39 +52,64 @@ exit unless prophet_tool.check_options(true)
|
|
49
52
|
throw "When --output and -F options are set only one file at a time can be run" if ( ARGV.length> 1 ) && ( prophet_tool.explicit_output!=nil ) && (prophet_tool.one_ata_time!=nil)
|
50
53
|
|
51
54
|
# Obtain a global environment object
|
52
|
-
genv=Constants.
|
55
|
+
genv=Constants.instance
|
53
56
|
|
54
57
|
input_stagers=[]
|
55
58
|
inputs=ARGV.collect { |file_name| file_name.chomp}
|
56
59
|
if for_galaxy
|
57
|
-
|
60
|
+
|
61
|
+
# Force a copy since we need to edit it temporarily
|
62
|
+
input_stagers = inputs.collect {|ip| GalaxyUtil.stage_pepxml(ip,:force_copy => true) }
|
63
|
+
|
64
|
+
# For each pepxml we will also have some raw files that need staging.
|
65
|
+
# And for each raw file we need to replace references to it in the pep.xml with the staged path
|
66
|
+
#
|
67
|
+
input_stagers.each do |pepxml_stager|
|
68
|
+
staged_pepxml_path = pepxml_stager.staged_path
|
69
|
+
raw_inputs = PepXML.new(staged_pepxml_path).find_runs()
|
70
|
+
|
71
|
+
raw_inputs.each_pair do |base_name,atts|
|
72
|
+
extension = atts[:type]=="" ? "" : ".#{atts[:type]}"
|
73
|
+
|
74
|
+
raw_input_stager = GalaxyStager.new(base_name, :extension => extension)
|
75
|
+
|
76
|
+
GalaxyStager.replace_references(staged_pepxml_path,base_name,raw_input_stager.staged_base_path)
|
77
|
+
end
|
78
|
+
end
|
79
|
+
|
58
80
|
inputs=input_stagers.collect { |sg| sg.staged_path }
|
59
81
|
end
|
60
82
|
|
61
83
|
# Interrogate all the input files to obtain the database and search engine from them
|
62
84
|
#
|
63
|
-
genv.log("Determining search engine and
|
85
|
+
genv.log("Determining search engine, database and digestion enzyme for input files ...",:info)
|
64
86
|
file_info={}
|
65
87
|
inputs.each {|file_name|
|
66
88
|
name=file_name.chomp
|
67
89
|
|
68
|
-
|
90
|
+
throw "Missing input file #{file_name}" unless File.exist?(file_name)
|
91
|
+
|
92
|
+
file_pepxml = PepXML.new(name)
|
93
|
+
|
94
|
+
engine=file_pepxml.extract_engine()
|
69
95
|
if prophet_tool.database
|
70
96
|
db_path = prophet_tool.database_info.path
|
71
97
|
else
|
72
|
-
db_path=
|
98
|
+
db_path=file_pepxml.extract_db()
|
73
99
|
throw "Unable to find database #{db_path} used for searching. Specify database path using -d option" unless File.exist?(db_path)
|
74
100
|
end
|
101
|
+
|
102
|
+
enzyme = file_pepxml.extract_enzyme()
|
75
103
|
|
76
|
-
|
77
|
-
file_info[name]={:engine=>engine , :database=>db_path }
|
104
|
+
file_info[name]={:engine=>engine , :database=>db_path, :enzyme=>enzyme }
|
78
105
|
}
|
79
106
|
|
80
|
-
# Check that all searches were performed with the same engine and
|
107
|
+
# Check that all searches were performed with the same engine, database and enzyme
|
81
108
|
#
|
82
109
|
#
|
83
110
|
engine=nil
|
84
111
|
database=nil
|
112
|
+
enzyme=nil
|
85
113
|
inputs=file_info.collect do |info|
|
86
114
|
if ( engine==nil)
|
87
115
|
engine=info[1][:engine]
|
@@ -89,7 +117,11 @@ inputs=file_info.collect do |info|
|
|
89
117
|
if ( database==nil)
|
90
118
|
database=info[1][:database]
|
91
119
|
end
|
92
|
-
|
120
|
+
if ( enzyme==nil)
|
121
|
+
enzyme=info[1][:enzyme]
|
122
|
+
end
|
123
|
+
|
124
|
+
throw "All files to be analyzed must have been searched with the same database and search engine" unless (info[1][:engine]==engine) && (info[1][:database]) && (info[1][:enzyme]==enzyme)
|
93
125
|
|
94
126
|
retname= info[0]
|
95
127
|
# if ( info[0]=~/\.dat$/)
|
@@ -100,13 +132,21 @@ inputs=file_info.collect do |info|
|
|
100
132
|
|
101
133
|
end
|
102
134
|
|
103
|
-
def generate_command(genv,prophet_tool,inputs,output,database,engine)
|
135
|
+
def generate_command(genv,prophet_tool,inputs,output,database,engine,enzyme)
|
104
136
|
|
105
|
-
|
137
|
+
enzyme_code = ProphetTool.xinteract_code_for_enzyme(enzyme)
|
138
|
+
|
139
|
+
throw "Unrecognized enzyme #{enzyme}" if enzyme_code.nil?
|
140
|
+
|
141
|
+
cmd="xinteract -N#{output} -l7 -e#{enzyme_code} -D'#{database}' "
|
106
142
|
|
107
143
|
# Do not produce png plots
|
108
144
|
cmd << " -Ot "
|
109
145
|
|
146
|
+
if prophet_tool.threads
|
147
|
+
cmd << " -THREADS=#{prophet_tool.threads}"
|
148
|
+
end
|
149
|
+
|
110
150
|
if prophet_tool.glyco
|
111
151
|
cmd << " -Og "
|
112
152
|
end
|
@@ -166,14 +206,11 @@ def generate_command(genv,prophet_tool,inputs,output,database,engine)
|
|
166
206
|
end
|
167
207
|
|
168
208
|
unless prophet_tool.no_decoys
|
169
|
-
|
170
|
-
if engine=="omssa" || engine=="phenyx"
|
171
|
-
cmd << " -Op -P -d#{prophet_tool.decoy_prefix} "
|
172
|
-
else
|
173
|
-
cmd << " -d#{prophet_tool.decoy_prefix} "
|
174
|
-
end
|
209
|
+
cmd << " -d#{prophet_tool.decoy_prefix} -Od "
|
175
210
|
end
|
176
211
|
|
212
|
+
cmd << " -p#{prophet_tool.probability_threshold}"
|
213
|
+
|
177
214
|
if ( inputs.class==Array)
|
178
215
|
cmd << " #{inputs.join(" ")}"
|
179
216
|
else
|
@@ -199,7 +236,7 @@ if ( prophet_tool.one_ata_time )
|
|
199
236
|
inputs.each do |input|
|
200
237
|
output_file_name=Tool.default_output_path(input,".pep.xml",prophet_tool.output_prefix,@output_suffix)
|
201
238
|
|
202
|
-
cmd=generate_command(genv,prophet_tool,input,output_file_name,database,engine)
|
239
|
+
cmd=generate_command(genv,prophet_tool,input,output_file_name,database,engine,enzyme)
|
203
240
|
run_peptide_prophet(genv,prophet_tool,cmd,output_file_name,engine)
|
204
241
|
end
|
205
242
|
|
@@ -211,7 +248,7 @@ else
|
|
211
248
|
output_file_name=prophet_tool.explicit_output
|
212
249
|
end
|
213
250
|
|
214
|
-
cmd=generate_command(genv,prophet_tool,inputs,output_file_name,database,engine)
|
251
|
+
cmd=generate_command(genv,prophet_tool,inputs,output_file_name,database,engine,enzyme)
|
215
252
|
run_peptide_prophet(genv,prophet_tool,cmd,output_file_name,engine)
|
216
253
|
|
217
254
|
end
|