scipion-pyworkflow 3.7.0__py3-none-any.whl
This diff represents the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pyworkflow/__init__.py +33 -0
- pyworkflow/apps/__init__.py +29 -0
- pyworkflow/apps/pw_manager.py +37 -0
- pyworkflow/apps/pw_plot.py +51 -0
- pyworkflow/apps/pw_project.py +113 -0
- pyworkflow/apps/pw_protocol_list.py +143 -0
- pyworkflow/apps/pw_protocol_run.py +51 -0
- pyworkflow/apps/pw_run_tests.py +267 -0
- pyworkflow/apps/pw_schedule_run.py +322 -0
- pyworkflow/apps/pw_sleep.py +37 -0
- pyworkflow/apps/pw_sync_data.py +439 -0
- pyworkflow/apps/pw_viewer.py +78 -0
- pyworkflow/config.py +536 -0
- pyworkflow/constants.py +212 -0
- pyworkflow/exceptions.py +18 -0
- pyworkflow/gui/__init__.py +36 -0
- pyworkflow/gui/browser.py +726 -0
- pyworkflow/gui/canvas.py +1190 -0
- pyworkflow/gui/dialog.py +976 -0
- pyworkflow/gui/form.py +2627 -0
- pyworkflow/gui/graph.py +247 -0
- pyworkflow/gui/graph_layout.py +271 -0
- pyworkflow/gui/gui.py +566 -0
- pyworkflow/gui/matplotlib_image.py +233 -0
- pyworkflow/gui/plotter.py +247 -0
- pyworkflow/gui/project/__init__.py +25 -0
- pyworkflow/gui/project/base.py +192 -0
- pyworkflow/gui/project/constants.py +139 -0
- pyworkflow/gui/project/labels.py +205 -0
- pyworkflow/gui/project/project.py +484 -0
- pyworkflow/gui/project/searchprotocol.py +154 -0
- pyworkflow/gui/project/searchrun.py +181 -0
- pyworkflow/gui/project/steps.py +166 -0
- pyworkflow/gui/project/utils.py +332 -0
- pyworkflow/gui/project/variables.py +179 -0
- pyworkflow/gui/project/viewdata.py +472 -0
- pyworkflow/gui/project/viewprojects.py +510 -0
- pyworkflow/gui/project/viewprotocols.py +2093 -0
- pyworkflow/gui/project/viewprotocols_extra.py +560 -0
- pyworkflow/gui/text.py +771 -0
- pyworkflow/gui/tooltip.py +185 -0
- pyworkflow/gui/tree.py +684 -0
- pyworkflow/gui/widgets.py +307 -0
- pyworkflow/mapper/__init__.py +26 -0
- pyworkflow/mapper/mapper.py +222 -0
- pyworkflow/mapper/sqlite.py +1578 -0
- pyworkflow/mapper/sqlite_db.py +145 -0
- pyworkflow/object.py +1512 -0
- pyworkflow/plugin.py +712 -0
- pyworkflow/project/__init__.py +31 -0
- pyworkflow/project/config.py +451 -0
- pyworkflow/project/manager.py +179 -0
- pyworkflow/project/project.py +1990 -0
- pyworkflow/project/scripts/clean_projects.py +77 -0
- pyworkflow/project/scripts/config.py +92 -0
- pyworkflow/project/scripts/create.py +77 -0
- pyworkflow/project/scripts/edit_workflow.py +90 -0
- pyworkflow/project/scripts/fix_links.py +39 -0
- pyworkflow/project/scripts/load.py +87 -0
- pyworkflow/project/scripts/refresh.py +83 -0
- pyworkflow/project/scripts/schedule.py +111 -0
- pyworkflow/project/scripts/stack2volume.py +41 -0
- pyworkflow/project/scripts/stop.py +81 -0
- pyworkflow/protocol/__init__.py +38 -0
- pyworkflow/protocol/bibtex.py +48 -0
- pyworkflow/protocol/constants.py +86 -0
- pyworkflow/protocol/executor.py +334 -0
- pyworkflow/protocol/hosts.py +313 -0
- pyworkflow/protocol/launch.py +270 -0
- pyworkflow/protocol/package.py +42 -0
- pyworkflow/protocol/params.py +744 -0
- pyworkflow/protocol/protocol.py +2554 -0
- pyworkflow/resources/Imagej.png +0 -0
- pyworkflow/resources/chimera.png +0 -0
- pyworkflow/resources/fa-exclamation-triangle_alert.png +0 -0
- pyworkflow/resources/fa-info-circle_alert.png +0 -0
- pyworkflow/resources/fa-search.png +0 -0
- pyworkflow/resources/fa-times-circle_alert.png +0 -0
- pyworkflow/resources/file_vol.png +0 -0
- pyworkflow/resources/loading.gif +0 -0
- pyworkflow/resources/no-image128.png +0 -0
- pyworkflow/resources/scipion_bn.png +0 -0
- pyworkflow/resources/scipion_icon.png +0 -0
- pyworkflow/resources/scipion_icon.svg +397 -0
- pyworkflow/resources/scipion_icon_proj.png +0 -0
- pyworkflow/resources/scipion_icon_projs.png +0 -0
- pyworkflow/resources/scipion_icon_prot.png +0 -0
- pyworkflow/resources/scipion_logo.png +0 -0
- pyworkflow/resources/scipion_logo_normal.png +0 -0
- pyworkflow/resources/scipion_logo_small.png +0 -0
- pyworkflow/resources/sprites.png +0 -0
- pyworkflow/resources/sprites.xcf +0 -0
- pyworkflow/resources/wait.gif +0 -0
- pyworkflow/template.py +322 -0
- pyworkflow/tests/__init__.py +29 -0
- pyworkflow/tests/test_utils.py +25 -0
- pyworkflow/tests/tests.py +341 -0
- pyworkflow/utils/__init__.py +38 -0
- pyworkflow/utils/dataset.py +414 -0
- pyworkflow/utils/echo.py +104 -0
- pyworkflow/utils/graph.py +196 -0
- pyworkflow/utils/log.py +284 -0
- pyworkflow/utils/path.py +527 -0
- pyworkflow/utils/process.py +132 -0
- pyworkflow/utils/profiler.py +92 -0
- pyworkflow/utils/progressbar.py +154 -0
- pyworkflow/utils/properties.py +627 -0
- pyworkflow/utils/reflection.py +129 -0
- pyworkflow/utils/utils.py +877 -0
- pyworkflow/utils/which.py +229 -0
- pyworkflow/viewer.py +328 -0
- pyworkflow/webservices/__init__.py +8 -0
- pyworkflow/webservices/config.py +11 -0
- pyworkflow/webservices/notifier.py +162 -0
- pyworkflow/webservices/repository.py +59 -0
- pyworkflow/webservices/workflowhub.py +74 -0
- pyworkflow/wizard.py +64 -0
- pyworkflowtests/__init__.py +51 -0
- pyworkflowtests/bibtex.py +51 -0
- pyworkflowtests/objects.py +830 -0
- pyworkflowtests/protocols.py +154 -0
- pyworkflowtests/tests/__init__.py +0 -0
- pyworkflowtests/tests/test_canvas.py +72 -0
- pyworkflowtests/tests/test_domain.py +45 -0
- pyworkflowtests/tests/test_logs.py +74 -0
- pyworkflowtests/tests/test_mappers.py +392 -0
- pyworkflowtests/tests/test_object.py +507 -0
- pyworkflowtests/tests/test_project.py +42 -0
- pyworkflowtests/tests/test_protocol_execution.py +72 -0
- pyworkflowtests/tests/test_protocol_export.py +78 -0
- pyworkflowtests/tests/test_protocol_output.py +158 -0
- pyworkflowtests/tests/test_streaming.py +47 -0
- pyworkflowtests/tests/test_utils.py +210 -0
- scipion_pyworkflow-3.7.0.dist-info/LICENSE.txt +674 -0
- scipion_pyworkflow-3.7.0.dist-info/METADATA +107 -0
- scipion_pyworkflow-3.7.0.dist-info/RECORD +140 -0
- scipion_pyworkflow-3.7.0.dist-info/WHEEL +5 -0
- scipion_pyworkflow-3.7.0.dist-info/dependency_links.txt +1 -0
- scipion_pyworkflow-3.7.0.dist-info/entry_points.txt +5 -0
- scipion_pyworkflow-3.7.0.dist-info/top_level.txt +2 -0
pyworkflow/protocol/hosts.py
@@ -0,0 +1,313 @@
# **************************************************************************
# *
# * Authors: J.M. De la Rosa Trevin (jmdelarosa@cnb.csic.es)
# *
# * Unidad de Bioinformatica of Centro Nacional de Biotecnologia , CSIC
# *
# * This program is free software; you can redistribute it and/or modify
# * it under the terms of the GNU General Public License as published by
# * the Free Software Foundation; either version 3 of the License, or
# * (at your option) any later version.
# *
# * This program is distributed in the hope that it will be useful,
# * but WITHOUT ANY WARRANTY; without even the implied warranty of
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# * GNU General Public License for more details.
# *
# * You should have received a copy of the GNU General Public License
# * along with this program; if not, write to the Free Software
# * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
# * 02111-1307 USA
# *
# * All comments concerning this program package may be sent to the
# * e-mail address 'scipion@cnb.csic.es'
# *
# **************************************************************************
"""
This module contains classes to store information about
execution hosts.
"""

import os
import sys
import json
from configparser import RawConfigParser
from collections import OrderedDict

import pyworkflow as pw
from pyworkflow.object import Object, String, Integer


class HostConfig(Object):
    """ Main class to store the configuration for execution hosts. """

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.label = String(kwargs.get('label', None))
        self.hostName = String(kwargs.get('hostName', None))
        self.userName = String()
        self.password = String()
        self.hostPath = String()
        self.mpiCommand = String()
        self.scipionHome = String()
        self.scipionConfig = String()
        self.address = String()
        self.queueSystem = QueueSystemConfig()

    def getLabel(self):
        return self.label.get()

    def getHostName(self):
        return self.hostName.get()

    def getUserName(self):
        return self.userName.get()

    def getPassword(self):
        return self.password.get()

    def getHostPath(self):
        return self.hostPath.get()

    def getSubmitCommand(self):
        return self.queueSystem.submitCommand.get()

    def getSubmitPrefix(self):
        return self.queueSystem.submitPrefix.get()

    def getCheckCommand(self):
        return self.queueSystem.checkCommand.get()

    def getCancelCommand(self):
        return self.queueSystem.cancelCommand.get()

    def isQueueMandatory(self):
        return self.queueSystem.mandatory.get()

    def getSubmitTemplate(self):
        return self.queueSystem.getSubmitTemplate()

    def getQueuesDefault(self):
        return self.queueSystem.queuesDefault

    def getMpiCommand(self):
        return self.mpiCommand.get()

    def getQueueSystem(self):
        return self.queueSystem

    def getJobDoneRegex(self):
        return self.queueSystem.jobDoneRegex.get()

    def setLabel(self, label):
        self.label.set(label)

    def setHostName(self, hostName):
        self.hostName.set(hostName)

    def setUserName(self, userName):
        self.userName.set(userName)

    def setPassword(self, password):
        self.password.set(password)

    def setHostPath(self, hostPath):
        self.hostPath.set(hostPath)

    def setMpiCommand(self, mpiCommand):
        self.mpiCommand.set(mpiCommand)

    def setQueueSystem(self, queueSystem):
        self.queueSystem = queueSystem

    def getScipionHome(self):
        """ Return the path where Scipion is installed in
        the host.
        """
        return self.scipionHome.get()

    def setScipionHome(self, newScipionHome):
        self.scipionHome.set(newScipionHome)

    def getScipionConfig(self):
        """ Return the configuration file to read on
        this host.
        """
        return self.scipionConfig.get()

    def setScipionConfig(self, newConfig):
        self.scipionConfig.set(newConfig)

    def getAddress(self):
        return self.address.get()

    def setAddress(self, newAddress):
        return self.address.set(newAddress)

    @classmethod
    def writeBasic(cls, configFn):
        """ Write a very basic host configuration for testing purposes. """
        with open(configFn, 'w') as f:
            f.write('[localhost]\nPARALLEL_COMMAND = '
                    'mpirun -np %_(JOB_NODES)d --map-by node %_(COMMAND)s\n')

    @classmethod
    def load(cls, hostsConf):
        """ Load several hosts from a configuration file.
        Return a dictionary with hostName -> hostConfig pairs.
        """
        # Read from the user's config file. Raw to avoid interpolation of %: we expect %_
        cp = RawConfigParser(comment_prefixes=";")
        cp.optionxform = str  # keep case (stackoverflow.com/questions/1611799)
        hosts = OrderedDict()

        try:
            assert cp.read(hostsConf) != [], 'Missing file %s' % hostsConf

            for hostName in cp.sections():
                host = HostConfig(label=hostName, hostName=hostName)
                host.setHostPath(pw.Config.SCIPION_USER_DATA)

                # Helper functions (to write less)
                def get(var, default=None):
                    if cp.has_option(hostName, var):
                        value = cp.get(hostName, var)
                        # Rescue python2.7 behaviour: ## at the beginning of a line means a single #.
                        # https://github.com/scipion-em/scipion-pyworkflow/issues/70
                        value = value.replace("\n##", "\n#")

                        # Keep compatibility: %_ --> %%
                        value = value.replace('%_(', '%(')

                        return value
                    else:
                        return default

                def getDict(var):
                    od = OrderedDict()

                    if cp.has_option(hostName, var):
                        for key, value in json.loads(get(var)).items():
                            od[key] = value

                    return od

                host.setScipionHome(get(pw.SCIPION_HOME_VAR, pw.Config.SCIPION_HOME))
                host.setScipionConfig(pw.Config.SCIPION_CONFIG)
                # Read the address of the remote hosts,
                # using 'localhost' as default for backward compatibility
                host.setAddress(get('ADDRESS', 'localhost'))
                host.mpiCommand.set(get('PARALLEL_COMMAND'))
                host.queueSystem = QueueSystemConfig()
                hostQueue = host.queueSystem  # shortcut
                hostQueue.name.set(get('NAME'))

                # If the NAME is not provided or empty,
                # do not try to parse the rest of the queue parameters
                if hostQueue.hasName():
                    hostQueue.setMandatory(get('MANDATORY', 0))
                    hostQueue.submitPrefix.set(get('SUBMIT_PREFIX', ''))
                    hostQueue.submitCommand.set(get('SUBMIT_COMMAND'))
                    hostQueue.submitTemplate.set(get('SUBMIT_TEMPLATE'))
                    hostQueue.cancelCommand.set(get('CANCEL_COMMAND'))
                    hostQueue.checkCommand.set(get('CHECK_COMMAND'))
                    hostQueue.jobDoneRegex.set(get('JOB_DONE_REGEX'))
                    hostQueue.queues = getDict('QUEUES')
                    hostQueue.queuesDefault = getDict('QUEUES_DEFAULT')

                hosts[hostName] = host

            return hosts
        except Exception as e:
            sys.exit('Failed to read settings. The reported error was:\n %s\n'
                     'Review %s and run again.'
                     % (e, os.path.abspath(hostsConf)))


class QueueSystemConfig(Object):
    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.name = String()
        # Number of cores from which the queue is mandatory
        # 0 means not mandatory at all
        # 1 will force to launch all jobs through the queue
        self.mandatory = Integer()
        self.queues = None  # List of queue configurations
        self.submitCommand = String()
        # Allow changing the prefix of submission scripts;
        # we use ID.job by default, but in some clusters
        # the job script should start with a letter
        self.submitPrefix = String()
        self.checkCommand = String()
        self.cancelCommand = String()
        self.submitTemplate = String()
        self.jobDoneRegex = String()

    def hasName(self):
        return self.name.hasValue()

    def hasValue(self):
        return self.hasName() and len(self.queues)

    def getName(self):
        return self.name.get()

    def getMandatory(self):
        return self.mandatory.get()

    def getSubmitTemplate(self):
        return self.submitTemplate.get()

    def getSubmitCommand(self):
        return self.submitCommand.get()

    def getCheckCommand(self):
        return self.checkCommand.get()

    def getCancelCommand(self):
        return self.cancelCommand.get()

    def getQueues(self):
        return self.queues

    def setName(self, name):
        self.name.set(name)

    def setMandatory(self, mandatory):
        # This condition is for backward compatibility:
        # mandatory used to be a boolean; now it is the number of CPUs
        # from which the queue must be used
        if mandatory in ['False', 'false']:
            mandatory = 0
        elif mandatory in ['True', 'true']:
            mandatory = 1

        self.mandatory.set(mandatory)

    def setSubmitTemplate(self, submitTemplate):
        self.submitTemplate.set(submitTemplate)

    def setSubmitCommand(self, submitCommand):
        self.submitCommand.set(submitCommand)

    def setCheckCommand(self, checkCommand):
        self.checkCommand.set(checkCommand)

    def setCancelCommand(self, cancelCommand):
        self.cancelCommand.set(cancelCommand)

    def setJobDoneRegex(self, jobDoneRegex):
        self.jobDoneRegex.set(jobDoneRegex)

    def setQueues(self, queues):
        self.queues = queues

    def getQueueConfig(self, objId):
        if objId is not None and self.queues is not None:
            for queueConfig in self.queues:
                if objId == queueConfig.getObjId():
                    return queueConfig
        return None
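The hosts.py module above reads execution-host settings from an INI-style hosts configuration file. Below is a minimal usage sketch, assuming an initialized pyworkflow installation (pw.Config must be available); the file name and the expected values noted in the comments are illustrative only, not mandated by the package:

# Sketch only: exercise HostConfig.writeBasic() and HostConfig.load()
from pyworkflow.protocol.hosts import HostConfig

confFile = 'hosts.conf'            # hypothetical path chosen for this example
HostConfig.writeBasic(confFile)    # writes a [localhost] section with PARALLEL_COMMAND
hosts = HostConfig.load(confFile)  # OrderedDict of hostName -> HostConfig

localhost = hosts['localhost']
print(localhost.getLabel())        # expected: 'localhost'
print(localhost.getMpiCommand())   # expected: 'mpirun -np %(JOB_NODES)d --map-by node %(COMMAND)s'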
pyworkflow/protocol/launch.py
@@ -0,0 +1,270 @@
# **************************************************************************
# *
# * Authors: J.M. De la Rosa Trevin (jmdelarosa@cnb.csic.es)
# *
# * Unidad de Bioinformatica of Centro Nacional de Biotecnologia , CSIC
# *
# * This program is free software; you can redistribute it and/or modify
# * it under the terms of the GNU General Public License as published by
# * the Free Software Foundation; either version 3 of the License, or
# * (at your option) any later version.
# *
# * This program is distributed in the hope that it will be useful,
# * but WITHOUT ANY WARRANTY; without even the implied warranty of
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# * GNU General Public License for more details.
# *
# * You should have received a copy of the GNU General Public License
# * along with this program; if not, write to the Free Software
# * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
# * 02111-1307 USA
# *
# * All comments concerning this program package may be sent to the
# * e-mail address 'scipion@cnb.csic.es'
# *
# **************************************************************************
"""
This module is responsible for launching local protocol executions:

1- Check if the protocol will be submitted to a queue (using the queue template from the config)
2- Build the command that will be launched.

"""

import os
import re
import logging
logger = logging.getLogger(__file__)
from subprocess import Popen, PIPE
import pyworkflow as pw
from pyworkflow.exceptions import PyworkflowException
from pyworkflow.utils import (redStr, greenStr, makeFilePath, join, process,
                              getHostFullName)
from pyworkflow.protocol.constants import UNKNOWN_JOBID, STATUS_FAILED, STATUS_FINISHED, STATUS_RUNNING


# ******************************************************************
# *         Public functions provided by the module
# ******************************************************************

def launch(protocol, wait=False, stdin=None, stdout=None, stderr=None):
    """ This function should be used to launch a protocol. """
    _launchLocal(protocol, wait, stdin, stdout, stderr)


def stop(protocol):
    """
    Stop function for three scenarios:
    - If the queue is not used, kill the main protocol process and its child processes.
    - If the queue is used and the entire protocol is sent to the queue, cancel the job
      running the protocol using scancel.
    - If the queue is used and individual steps are sent to the queue, cancel all active
      jobs and kill the main protocol process and its child processes.
    """
    if protocol.useQueue() and not protocol.isScheduled():
        jobIds = protocol.getJobIds()
        for jobId in jobIds:  # Iterate even though it contains only one jobId
            host = protocol.getHostConfig()
            cancelCmd = host.getCancelCommand() % {'JOB_ID': jobId}
            logger.info(cancelCmd)
            _run(cancelCmd, wait=True)

        if protocol.useQueueForSteps():
            process.killWithChilds(protocol.getPid())
    else:
        process.killWithChilds(protocol.getPid())


def schedule(protocol, initialSleepTime=0, wait=False):
    """ Use this function to schedule protocols that are not ready to
    run yet. Right now it only makes sense to schedule jobs locally.
    """
    cmd = '%s %s' % (pw.PYTHON, pw.getScheduleScript())
    cmd += ' "%s" "%s" %s "%s" --initial_sleep %s' % (protocol.getProject().path,
                                                      protocol.getDbPath(),
                                                      protocol.strId(),
                                                      protocol.getScheduleLog(),
                                                      initialSleepTime)
    pid = _run(cmd, wait)
    protocol.setPid(pid)  # Set the pid correctly


# ******************************************************************
# *         Internal utility functions
# ******************************************************************
def _runsLocally(protocol):
    """ Return True if this protocol is running in this machine,
    where the PID makes sense.
    """
    return protocol.getHostFullName() == getHostFullName()


# ******************************************************************
# *         Functions related to LAUNCH
# ******************************************************************
def _getAppsProgram(prog):
    """ Get a command to launch a program under the apps folder.
    """
    return "%s %s" % (pw.PYTHON, pw.join(pw.APPS, prog))


def _launchLocal(protocol, wait, stdin=None, stdout=None, stderr=None):
    """
    :param protocol: Protocol to launch
    :param wait: Pass True if you want to wait for the process to finish
    :param stdin: stdin object to direct stdin to
    :param stdout: stdout object to send process stdout
    :param stderr: stderr object to send process stderr
    """

    command = '{python} {prot_run} "{project_path}" "{db_path}" {prot_id} "{stdout_log}" "{stderr_log}"'.format(
        python=pw.PYTHON,
        prot_run=pw.join(pw.APPS, 'pw_protocol_run.py'),
        project_path=protocol.getProject().path,
        db_path=protocol.getDbPath(),
        prot_id=protocol.strId(),
        # We make the log paths absolute in case the working dir is not passed to the node when running through a
        # queue. The reason is that since 3.0.27 the first thing affected by the current working dir is the creation
        # of the logs, even before loading the project, which was and is setting the working dir to the project
        # path. IMPORTANT: this assumes the paths before the queue and after the queue (nodes) are the same, which
        # should be safe since "project_path" passed here is absolute.
        stdout_log=os.path.abspath(protocol.getStdoutLog()),
        stderr_log=os.path.abspath(protocol.getStderrLog())
    )

    hostConfig = protocol.getHostConfig()

    # Clean Pid and JobIds
    protocol.cleanExecutionAttributes()
    protocol._store(protocol._jobId)

    # Handle three use cases: one will use the job ID, and the other two will use the process ID.
    if protocol.useQueueForProtocol():  # Retrieve the job ID and set it; this will be used to control the protocol.
        submitDict = dict(hostConfig.getQueuesDefault())
        submitDict.update(protocol.getSubmitDict())
        submitDict['JOB_COMMAND'] = command
        jobId = _submit(hostConfig, submitDict)
        if jobId is None or jobId == UNKNOWN_JOBID:
            protocol.setStatus(STATUS_FAILED)
        else:
            protocol.setJobId(jobId)
            protocol.setPid(0)  # we go through the queue, so we rely on the jobId
    else:  # If not, retrieve and set the process ID (both for normal execution and when using the queue for steps)
        pId = _run(command, wait, stdin, stdout, stderr)
        protocol.setPid(pId)


def analyzeFormattingTypeError(string, dictionary):
    """ Receives a string with %(VARS) to be replaced with a dictionary.
    It splits the string by \n and tests the formatting per line. Returns an exception
    describing all problems found, if any."""

    # Do the replacement line by line
    lines = string.split("\n")

    problematicLines = []
    for line in lines:
        try:
            line % dictionary
        except KeyError as e:
            problematicLines.append(line + " --> variable not present in this context.")
        except Exception as e:
            problematicLines.append(line + " --> " + str(e))

    if problematicLines:
        return PyworkflowException('Following lines in %s seem to be problematic.\n'
                                   'Values known in this context are: \n%s'
                                   'Please review its format or content.\n%s'
                                   % (dictionary, pw.Config.SCIPION_HOSTS, "\n".join(problematicLines)),
                                   url=pw.DOCSITEURLS.HOST_CONFIG)


def _submit(hostConfig, submitDict, cwd=None, env=None):
    """ Submit a protocol to a queue system. Return its job id.
    """
    # First create the submission script to be launched,
    # formatting it using the template
    template = hostConfig.getSubmitTemplate()

    try:
        template = template % submitDict
    except Exception as e:
        # Capture parsing errors
        exception = analyzeFormattingTypeError(template, submitDict)

        if exception:
            raise exception
        else:
            # If there is no exception, then raise the actual one
            raise e

    # FIXME: CREATE THE PATH FIRST
    scripPath = submitDict['JOB_SCRIPT']
    f = open(scripPath, 'w')
    # Ensure the path exists
    makeFilePath(scripPath)
    # Add some line ends because in some clusters it fails
    # to submit jobs if the submit script does not have an end of line
    f.write(template + '\n\n')
    f.close()
    # This should format the command using a template like:
    # "qsub %(JOB_SCRIPT)s"
    command = hostConfig.getSubmitCommand() % submitDict
    gcmd = greenStr(command)
    logger.info("** Submitting to queue: '%s'" % gcmd)

    p = Popen(command, shell=True, stdout=PIPE, cwd=cwd, env=env)
    out = p.communicate()[0]
    # Try to parse the result of qsub, searching for a number (jobId)
    # Review this, it seems to be exclusive to the Torque batch system
    s = re.search(r'(\d+)', str(out))
    if p.returncode == 0 and s:
        job = int(s.group(0))
        logger.info("Launched job with id %s" % job)
        return job
    else:
        logger.info("Couldn't submit to queue for reason: %s " % redStr(out.decode()))
        return UNKNOWN_JOBID


def _checkJobStatus(hostConfig, jobid):
    """
    General method to verify the job status in the queue based on the jobId and the host.conf CHECK_COMMAND.
    Returns: STATUS_FINISHED (finished) or STATUS_RUNNING (running)
    """
    command = hostConfig.getCheckCommand() % {"JOB_ID": jobid}
    logger.debug("checking job status for %s: %s" % (jobid, command))

    p = Popen(command, shell=True, stdout=PIPE, preexec_fn=os.setsid)

    out = p.communicate()[0].decode(errors='backslashreplace')

    jobDoneRegex = hostConfig.getJobDoneRegex()
    logger.debug("Queue engine replied %s, variable JOB_DONE_REGEX has %s" % (out, jobDoneRegex))
    # If nothing is returned we assume the job is no longer in the queue and thus finished
    if out == "":
        logger.warning("Empty response from queue system to job (%s)" % jobid)
        return STATUS_FINISHED

    # If some string is returned we use the JOB_DONE_REGEX variable (if present) to infer the status
    elif jobDoneRegex is not None:
        s = re.search(jobDoneRegex, out)
        if s:
            logger.debug("Job (%s) finished" % jobid)
            return STATUS_FINISHED
        else:
            logger.debug("Job (%s) still running" % jobid)
            return STATUS_RUNNING
    # If JOB_DONE_REGEX is not defined and the queue has returned something we assume the job is still running
    else:
        return STATUS_RUNNING


def _run(command, wait, stdin=None, stdout=None, stderr=None):
    """ Execute a command in a subprocess and return the pid. """
    gcmd = greenStr(command)
    logger.info("** Running command: '%s'" % gcmd)
    p = Popen(command, shell=True, stdout=stdout, stderr=stderr)
    pid = p.pid
    if wait:
        p.wait()

    return pid
pyworkflow/protocol/package.py
@@ -0,0 +1,42 @@
# **************************************************************************
# *
# * Authors: P. Conesa Mingo (pconesa@cnb.csic.es)
# *
# * Unidad de Bioinformatica of Centro Nacional de Biotecnologia , CSIC
# *
# * This program is free software; you can redistribute it and/or modify
# * it under the terms of the GNU General Public License as published by
# * the Free Software Foundation; either version 3 of the License, or
# * (at your option) any later version.
# *
# * This program is distributed in the hope that it will be useful,
# * but WITHOUT ANY WARRANTY; without even the implied warranty of
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# * GNU General Public License for more details.
# *
# * You should have received a copy of the GNU General Public License
# * along with this program; if not, write to the Free Software
# * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
# * 02111-1307 USA
# *
# * All comments concerning this program package may be sent to the
# * e-mail address 'scipion@cnb.csic.es'
# *
# **************************************************************************


"""
This module contains classes related to EM package management
"""


class Package:
    def __init__(self, name, installed):
        self._name = name
        self._installed = installed

    def isInstalled(self):
        return self._installed

    def getName(self):
        return self._name
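Finally, a tiny sketch of the Package value object defined in the last hunk; the package name used here is arbitrary:

# Sketch only: Package is a plain holder for a name and an installed flag.
from pyworkflow.protocol.package import Package

pkg = Package('examplePlugin', installed=True)   # arbitrary example values
assert pkg.getName() == 'examplePlugin'
assert pkg.isInstalled()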