scipion-pyworkflow 3.10.6-py3-none-any.whl → 3.11.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
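For readers who want to reproduce a comparison like this locally, the sketch below (not part of scipion-pyworkflow) unpacks both wheels with the Python standard library and prints a unified diff of their text members. The wheel filenames and the set of extensions treated as text are assumptions; download the wheels first, e.g. with `pip download scipion-pyworkflow==3.10.6 --no-deps`.

```python
# Minimal sketch: rebuild a wheel-to-wheel diff with the standard library only.
import difflib
import zipfile

OLD_WHL = "scipion_pyworkflow-3.10.6-py3-none-any.whl"  # assumed local file
NEW_WHL = "scipion_pyworkflow-3.11.0-py3-none-any.whl"  # assumed local file


def wheel_texts(path):
    """Return {archive member: decoded text} for the text files inside a wheel."""
    with zipfile.ZipFile(path) as zf:
        return {name: zf.read(name).decode("utf-8", errors="replace")
                for name in zf.namelist()
                if name.endswith((".py", ".txt", "METADATA", "RECORD"))}


old, new = wheel_texts(OLD_WHL), wheel_texts(NEW_WHL)
for member in sorted(set(old) | set(new)):
    diff = difflib.unified_diff(
        old.get(member, "").splitlines(keepends=True),
        new.get(member, "").splitlines(keepends=True),
        fromfile="3.10.6/" + member, tofile="3.11.0/" + member)
    for line in diff:
        print(line, end="")
```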
- pyworkflow/config.py +131 -67
- pyworkflow/constants.py +2 -1
- pyworkflow/plugin.py +93 -44
- pyworkflow/resources/showj/arrowDown.png +0 -0
- pyworkflow/resources/showj/arrowUp.png +0 -0
- pyworkflow/resources/showj/background_section.png +0 -0
- pyworkflow/resources/showj/colRowModeOff.png +0 -0
- pyworkflow/resources/showj/colRowModeOn.png +0 -0
- pyworkflow/resources/showj/delete.png +0 -0
- pyworkflow/resources/showj/doc_icon.png +0 -0
- pyworkflow/resources/showj/download_icon.png +0 -0
- pyworkflow/resources/showj/enabled_gallery.png +0 -0
- pyworkflow/resources/showj/galleryViewOff.png +0 -0
- pyworkflow/resources/showj/galleryViewOn.png +0 -0
- pyworkflow/resources/showj/goto.png +0 -0
- pyworkflow/resources/showj/menu.png +0 -0
- pyworkflow/resources/showj/separator.png +0 -0
- pyworkflow/resources/showj/tableViewOff.png +0 -0
- pyworkflow/resources/showj/tableViewOn.png +0 -0
- pyworkflow/resources/showj/ui-bg_glass_75_e6e6e6_1x400.png +0 -0
- pyworkflow/resources/showj/ui-bg_glass_95_fef1ec_1x400.png +0 -0
- pyworkflow/resources/showj/ui-bg_highlight-soft_75_cccccc_1x100.png +0 -0
- pyworkflow/resources/showj/volumeOff.png +0 -0
- pyworkflow/resources/showj/volumeOn.png +0 -0
- pyworkflow/viewer.py +23 -1
- pyworkflowtests/protocols.py +1 -3
- {scipion_pyworkflow-3.10.6.dist-info → scipion_pyworkflow-3.11.0.dist-info}/METADATA +13 -27
- scipion_pyworkflow-3.11.0.dist-info/RECORD +71 -0
- {scipion_pyworkflow-3.10.6.dist-info → scipion_pyworkflow-3.11.0.dist-info}/WHEEL +1 -1
- pyworkflow/apps/__init__.py +0 -29
- pyworkflow/apps/pw_manager.py +0 -37
- pyworkflow/apps/pw_plot.py +0 -51
- pyworkflow/apps/pw_project.py +0 -113
- pyworkflow/apps/pw_protocol_list.py +0 -143
- pyworkflow/apps/pw_protocol_run.py +0 -51
- pyworkflow/apps/pw_run_tests.py +0 -267
- pyworkflow/apps/pw_schedule_run.py +0 -322
- pyworkflow/apps/pw_sleep.py +0 -37
- pyworkflow/apps/pw_sync_data.py +0 -439
- pyworkflow/apps/pw_viewer.py +0 -78
- pyworkflow/gui/__init__.py +0 -36
- pyworkflow/gui/browser.py +0 -726
- pyworkflow/gui/canvas.py +0 -1190
- pyworkflow/gui/dialog.py +0 -977
- pyworkflow/gui/form.py +0 -2637
- pyworkflow/gui/graph.py +0 -247
- pyworkflow/gui/graph_layout.py +0 -271
- pyworkflow/gui/gui.py +0 -566
- pyworkflow/gui/matplotlib_image.py +0 -233
- pyworkflow/gui/plotter.py +0 -247
- pyworkflow/gui/project/__init__.py +0 -25
- pyworkflow/gui/project/base.py +0 -192
- pyworkflow/gui/project/constants.py +0 -139
- pyworkflow/gui/project/labels.py +0 -205
- pyworkflow/gui/project/project.py +0 -492
- pyworkflow/gui/project/searchprotocol.py +0 -154
- pyworkflow/gui/project/searchrun.py +0 -181
- pyworkflow/gui/project/steps.py +0 -171
- pyworkflow/gui/project/utils.py +0 -332
- pyworkflow/gui/project/variables.py +0 -179
- pyworkflow/gui/project/viewdata.py +0 -472
- pyworkflow/gui/project/viewprojects.py +0 -510
- pyworkflow/gui/project/viewprotocols.py +0 -2093
- pyworkflow/gui/project/viewprotocols_extra.py +0 -559
- pyworkflow/gui/text.py +0 -771
- pyworkflow/gui/tooltip.py +0 -185
- pyworkflow/gui/tree.py +0 -684
- pyworkflow/gui/widgets.py +0 -307
- pyworkflow/mapper/__init__.py +0 -26
- pyworkflow/mapper/mapper.py +0 -222
- pyworkflow/mapper/sqlite.py +0 -1581
- pyworkflow/mapper/sqlite_db.py +0 -145
- pyworkflow/project/__init__.py +0 -31
- pyworkflow/project/config.py +0 -454
- pyworkflow/project/manager.py +0 -180
- pyworkflow/project/project.py +0 -2007
- pyworkflow/protocol/__init__.py +0 -38
- pyworkflow/protocol/bibtex.py +0 -48
- pyworkflow/protocol/constants.py +0 -87
- pyworkflow/protocol/executor.py +0 -471
- pyworkflow/protocol/hosts.py +0 -314
- pyworkflow/protocol/launch.py +0 -270
- pyworkflow/protocol/package.py +0 -42
- pyworkflow/protocol/params.py +0 -741
- pyworkflow/protocol/protocol.py +0 -2641
- pyworkflow/tests/__init__.py +0 -29
- pyworkflow/tests/test_utils.py +0 -25
- pyworkflow/tests/tests.py +0 -341
- pyworkflow/utils/__init__.py +0 -38
- pyworkflow/utils/dataset.py +0 -414
- pyworkflow/utils/echo.py +0 -104
- pyworkflow/utils/graph.py +0 -169
- pyworkflow/utils/log.py +0 -284
- pyworkflow/utils/path.py +0 -528
- pyworkflow/utils/process.py +0 -153
- pyworkflow/utils/profiler.py +0 -92
- pyworkflow/utils/progressbar.py +0 -154
- pyworkflow/utils/properties.py +0 -631
- pyworkflow/utils/reflection.py +0 -129
- pyworkflow/utils/utils.py +0 -879
- pyworkflow/utils/which.py +0 -229
- pyworkflow/webservices/__init__.py +0 -8
- pyworkflow/webservices/config.py +0 -11
- pyworkflow/webservices/notifier.py +0 -162
- pyworkflow/webservices/repository.py +0 -59
- pyworkflow/webservices/workflowhub.py +0 -74
- pyworkflowtests/tests/__init__.py +0 -0
- pyworkflowtests/tests/test_canvas.py +0 -72
- pyworkflowtests/tests/test_domain.py +0 -45
- pyworkflowtests/tests/test_logs.py +0 -74
- pyworkflowtests/tests/test_mappers.py +0 -392
- pyworkflowtests/tests/test_object.py +0 -507
- pyworkflowtests/tests/test_project.py +0 -42
- pyworkflowtests/tests/test_protocol_execution.py +0 -142
- pyworkflowtests/tests/test_protocol_export.py +0 -78
- pyworkflowtests/tests/test_protocol_output.py +0 -158
- pyworkflowtests/tests/test_streaming.py +0 -47
- pyworkflowtests/tests/test_utils.py +0 -210
- scipion_pyworkflow-3.10.6.dist-info/RECORD +0 -140
- scipion_pyworkflow-3.10.6.dist-info/dependency_links.txt +0 -1
- {scipion_pyworkflow-3.10.6.dist-info → scipion_pyworkflow-3.11.0.dist-info}/entry_points.txt +0 -0
- {scipion_pyworkflow-3.10.6.dist-info → scipion_pyworkflow-3.11.0.dist-info}/licenses/LICENSE.txt +0 -0
- {scipion_pyworkflow-3.10.6.dist-info → scipion_pyworkflow-3.11.0.dist-info}/top_level.txt +0 -0
pyworkflow/project/project.py
DELETED
@@ -1,2007 +0,0 @@
-#!/usr/bin/env python
-# **************************************************************************
-# *
-# * Authors: J.M. De la Rosa Trevin (delarosatrevin@scilifelab.se) [1]
-# *
-# * [1] SciLifeLab, Stockholm University
-# *
-# * This program is free software; you can redistribute it and/or modify
-# * it under the terms of the GNU General Public License as published by
-# * the Free Software Foundation; either version 3 of the License, or
-# * (at your option) any later version.
-# *
-# * This program is distributed in the hope that it will be useful,
-# * but WITHOUT ANY WARRANTY; without even the implied warranty of
-# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# * GNU General Public License for more details.
-# *
-# * You should have received a copy of the GNU General Public License
-# * along with this program; if not, write to the Free Software
-# * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
-# * 02111-1307 USA
-# *
-# * All comments concerning this program package may be sent to the
-# * e-mail address 'scipion@cnb.csic.es'
-# *
-# **************************************************************************
-import logging
-
-from ..protocol.launch import _checkJobStatus
-
-ROOT_NODE_NAME = "PROJECT"
-logger = logging.getLogger(__name__)
-from pyworkflow.utils.log import LoggingConfigurator
-import datetime as dt
-import json
-import os
-import re
-import time
-import traceback
-from collections import OrderedDict
-
-import pyworkflow as pw
-from pyworkflow.constants import PROJECT_DBNAME, PROJECT_SETTINGS
-import pyworkflow.object as pwobj
-import pyworkflow.protocol as pwprot
-import pyworkflow.utils as pwutils
-from pyworkflow.mapper import SqliteMapper
-from pyworkflow.protocol.constants import (MODE_RESTART, MODE_RESUME,
-                                           STATUS_INTERACTIVE, ACTIVE_STATUS,
-                                           UNKNOWN_JOBID, INITIAL_SLEEP_TIME, STATUS_FINISHED)
-from pyworkflow.protocol.protocol import Protocol
-
-from . import config
-
-
-OBJECT_PARENT_ID = pwobj.OBJECT_PARENT_ID
-PROJECT_LOGS = 'Logs'
-PROJECT_RUNS = 'Runs'
-PROJECT_TMP = 'Tmp'
-PROJECT_UPLOAD = 'Uploads'
-PROJECT_CONFIG = '.config'
-PROJECT_CREATION_TIME = 'CreationTime'
-
-# Regex to get numbering suffix and automatically propose runName
-REGEX_NUMBER_ENDING = re.compile(r'(?P<prefix>.+)(?P<number>\(\d*\))\s*$')
-REGEX_NUMBER_ENDING_CP = re.compile(r'(?P<prefix>.+\s\(copy)(?P<number>.*)\)\s*$')
-
-
-class Project(object):
-    """This class will handle all information
-    related with a Project"""
-
-    @classmethod
-    def getDbName(cls):
-        """ Return the name of the database file of projects. """
-        return PROJECT_DBNAME
-
-    def __init__(self, domain, path):
-        """
-        Create a new Project instance.
-        :param domain: The application domain from where to get objects and
-            protocols.
-        :param path: Path where the project will be created/loaded
-        """
-        self._domain = domain
-        self.name = path
-        self.shortName = os.path.basename(path)
-        self.path = os.path.abspath(path)
-        self._isLink = os.path.islink(path)
-        self._isInReadOnlyFolder = False
-        self.pathList = [] # Store all related paths
-        self.dbPath = self.__addPath(PROJECT_DBNAME)
-        self.logsPath = self.__addPath(PROJECT_LOGS)
-        self.runsPath = self.__addPath(PROJECT_RUNS)
-        self.tmpPath = self.__addPath(PROJECT_TMP)
-        self.uploadPath = self.__addPath(PROJECT_UPLOAD)
-        self.settingsPath = self.__addPath(PROJECT_SETTINGS)
-        self.configPath = self.__addPath(PROJECT_CONFIG)
-        self.runs = None
-        self._runsGraph = None
-        self._transformGraph = None
-        self._sourceGraph = None
-        self.address = ''
-        self.port = pwutils.getFreePort()
-        self.mapper = None
-        self.settings:config.ProjectSettings = None
-        # Host configuration
-        self._hosts = None
-
-        # Creation time should be stored in project.sqlite when the project
-        # is created and then loaded with other properties from the database
-        self._creationTime = None
-
-        # Time stamp with the last run has been updated
-        self._lastRunTime = None
-
-    def getObjId(self):
-        """ Return the unique id assigned to this project. """
-        return os.path.basename(self.path)
-
-    def __addPath(self, *paths):
-        """Store a path needed for the project"""
-        p = self.getPath(*paths)
-        self.pathList.append(p)
-        return p
-
-    def getPath(self, *paths):
-        """Return path from the project root"""
-        if paths:
-            return os.path.join(*paths) # Why this is relative!!
-        else:
-            return self.path
-
-    def isLink(self):
-        """Returns if the project path is a link to another folder."""
-        return self._isLink
-
-    def getDbPath(self):
-        """ Return the path to the sqlite db. """
-        return self.dbPath
-
-    def getDbLastModificationDate(self):
-        """ Return the last modification date of the database """
-        pwutils.getFileLastModificationDate(self.getDbPath())
-
-    def getCreationTime(self):
-        """ Return the time when the project was created. """
-        # In project.create method, the first object inserted
-        # in the mapper should be the creation time
-        return self._creationTime.datetime()
-
-
-    def getComment(self):
-        """ Returns the project comment. Stored as CreationTime comment."""
-        return self._creationTime.getObjComment()
-
-    def setComment(self, newComment):
-        """ Sets the project comment """
-        self._creationTime.setObjComment(newComment)
-
-    def getSettingsCreationTime(self):
-        return self.settings.getCreationTime()
-
-    def getElapsedTime(self):
-        """ Returns the time elapsed from the creation to the last
-        execution time. """
-        if self._creationTime and self._lastRunTime:
-            creationTs = self.getCreationTime()
-            lastRunTs = self._lastRunTime.datetime()
-            return lastRunTs - creationTs
-        return None
-
-    def getLeftTime(self):
-        lifeTime = self.settings.getLifeTime()
-        if lifeTime:
-            td = dt.timedelta(hours=lifeTime)
-            return td - self.getElapsedTime()
-        else:
-            return None
-
-    def setDbPath(self, dbPath):
-        """ Set the project db path.
-        This function is used when running a protocol where
-        a project is loaded but using the protocol own sqlite file.
-        """
-        # First remove from pathList the old dbPath
-        self.pathList.remove(self.dbPath)
-        self.dbPath = os.path.abspath(dbPath)
-        self.pathList.append(self.dbPath)
-
-    def getName(self):
-        return self.name
-
-    def getDomain(self):
-        return self._domain
-
-    # TODO: maybe it has more sense to use this behaviour
-    # for just getName function...
-    def getShortName(self):
-        return self.shortName
-
-    def getTmpPath(self, *paths):
-        return self.getPath(PROJECT_TMP, *paths)
-
-    def getLogPath(self, *paths):
-        return self.getPath(PROJECT_LOGS, *paths)
-
-    def getProjectLog(self):
-        return os.path.join(self.path,self.getLogPath("project.log")) # For some reason getLogsPath is relative!
-
-    def getSettings(self):
-        return self.settings
-
-    def saveSettings(self):
-        # Read only mode
-        if not self.openedAsReadOnly():
-            self.settings.write()
-
-    def createSettings(self, runsView=1, readOnly=False):
-        self.settings = config.ProjectSettings()
-        self.settings.setRunsView(runsView)
-        self.settings.setReadOnly(readOnly)
-        self.settings.write(self.settingsPath)
-        return self.settings
-
-    def createMapper(self, sqliteFn):
-        """ Create a new SqliteMapper object and pass as classes dict
-        all globals and update with data and protocols from em.
-        """
-        classesDict = pwobj.Dict(default=pwprot.LegacyProtocol)
-        classesDict.update(self._domain.getMapperDict())
-        classesDict.update(config.__dict__)
-        return SqliteMapper(sqliteFn, classesDict)
-
-    def load(self, dbPath=None, hostsConf=None, protocolsConf=None, chdir=True,
-             loadAllConfig=True):
-        """
-        Load project data, configuration and settings.
-
-        :param dbPath: the path to the project database.
-            If None, use the project.sqlite in the project folder.
-        :param hostsConf: where to read the host configuration.
-            If None, check if exists in .config/hosts.conf
-            or read from ~/.config/scipion/hosts.conf
-        :param protocolsConf: Not used
-        :param chdir: If True, os.cwd will be set to project's path.
-        :param loadAllConfig: If True, settings from settings.sqlite will also be loaded
-
-        """
-
-        if not os.path.exists(self.path):
-            raise Exception("Cannot load project, path doesn't exist: %s"
-                            % self.path)
-
-        # If folder is read only, flag it and warn about it.
-        if not os.access(self.path, os.W_OK):
-            self._isInReadOnlyFolder = True
-            logger.warning("Project \"%s\": you don't have write permissions "
-                           "for project folder. Loading asd READ-ONLY." % self.shortName)
-
-        if chdir:
-            os.chdir(self.path) # Before doing nothing go to project dir
-
-        try:
-            self._loadDb(dbPath)
-            self._loadHosts(hostsConf)
-
-            if loadAllConfig:
-
-                # FIXME: Handle settings argument here
-
-                # It is possible that settings does not exists if
-                # we are loading a project after a Project.setDbName,
-                # used when running protocols
-                settingsPath = os.path.join(self.path, self.settingsPath)
-
-                logger.debug("settingsPath: %s" % settingsPath)
-
-                if os.path.exists(settingsPath):
-                    self.settings = config.ProjectSettings.load(settingsPath)
-                else:
-                    logger.info("settings is None")
-                    self.settings = None
-
-            self._loadCreationTime()
-
-        # Catch DB not found exception (when loading a project from a folder
-        # without project.sqlite
-        except MissingProjectDbException as noDBe:
-            # Raise it at before: This is a critical error and should be raised
-            raise noDBe
-
-        # Catch any less severe exception..to allow at least open the project.
-        # except Exception as e:
-        #     logger.info("ERROR: Project %s load failed.\n"
-        #                 " Message: %s\n" % (self.path, e))
-
-    def configureLogging(self):
-        LoggingConfigurator.setUpGUILogging(self.getProjectLog())
-    def _loadCreationTime(self):
-        # Load creation time, it should be in project.sqlite or
-        # in some old projects it is found in settings.sqlite
-
-        creationTime = self.mapper.selectBy(name=PROJECT_CREATION_TIME)
-
-        if creationTime: # CreationTime was found in project.sqlite
-            ctStr = creationTime[0] # This is our String type instance
-
-            # We store it in mem as datetime
-            self._creationTime = ctStr
-
-        else:
-
-            # If connected to project.sqlite and not any or the run.db
-            if self.path.endswith(PROJECT_DBNAME):
-                # We should read the creation time from settings.sqlite and
-                # update the CreationTime in the project.sqlite
-                self._creationTime = pwobj.String(self.getSettingsCreationTime())
-                self._storeCreationTime()
-
-    # ---- Helper functions to load different pieces of a project
-    def _loadDb(self, dbPath):
-        """ Load the mapper from the sqlite file in dbPath. """
-        if dbPath is not None:
-            self.setDbPath(dbPath)
-
-        absDbPath = os.path.join(self.path, self.dbPath)
-        if not os.path.exists(absDbPath):
-            raise MissingProjectDbException(
-                "Project database not found at '%s'" % absDbPath)
-        self.mapper = self.createMapper(absDbPath)
-
-    def closeMapper(self):
-        if self.mapper is not None:
-            self.mapper.close()
-            self.mapper = None
-
-    def getLocalConfigHosts(self):
-        """ Return the local file where the project will try to
-        read the hosts configuration. """
-        return self.getPath(PROJECT_CONFIG, pw.Config.SCIPION_HOSTS)
-
-    def _loadHosts(self, hosts):
-        """ Loads hosts configuration from hosts file. """
-        # If the host file is not passed as argument...
-        configHosts = pw.Config.SCIPION_HOSTS
-        projHosts = self.getLocalConfigHosts()
-
-        if hosts is None:
-            # Try first to read it from the project file .config./hosts.conf
-            if os.path.exists(projHosts):
-                hostsFile = projHosts
-            else:
-                localDir = os.path.dirname(pw.Config.SCIPION_LOCAL_CONFIG)
-                hostsFile = os.path.join(localDir, configHosts)
-        else:
-            pwutils.copyFile(hosts, projHosts)
-            hostsFile = hosts
-
-        self._hosts = pwprot.HostConfig.load(hostsFile)
-
-    def getHostNames(self):
-        """ Return the list of host name in the project. """
-        return list(self._hosts.keys())
-
-    def getHostConfig(self, hostName):
-        if hostName in self._hosts:
-            hostKey = hostName
-        else:
-            hostKey = self.getHostNames()[0]
-            logger.warning("Protocol host '%s' not found." % hostName)
-            logger.warning(" Using '%s' instead." % hostKey)
-
-        return self._hosts[hostKey]
-
-    def getProtocolView(self):
-        """ Returns de view selected in the tree when it was persisted"""
-        return self.settings.getProtocolView()
-
-    def create(self, runsView=1, readOnly=False, hostsConf=None,
-               protocolsConf=None, comment=None):
-        """Prepare all required paths and files to create a new project.
-
-        :param runsView: default view to associate the project with
-        :param readOnly: If True, project will be loaded as read only.
-        :param hostsConf: Path to the host.conf to be used when executing protocols
-        :param protocolsConf: Not used.
-        """
-        # Create project path if not exists
-        pwutils.path.makePath(self.path)
-        os.chdir(self.path) # Before doing nothing go to project dir
-        self._cleanData()
-        logger.info("Creating project at %s" % os.path.abspath(self.dbPath))
-        # Create db through the mapper
-        self.mapper = self.createMapper(self.dbPath)
-        # Store creation time
-        self._creationTime = pwobj.String(dt.datetime.now())
-        self.setComment(comment)
-        self._storeCreationTime()
-        # Load settings from .conf files and write .sqlite
-        self.settings = self.createSettings(runsView=runsView,
-                                            readOnly=readOnly)
-        # Create other paths inside project
-        for p in self.pathList:
-            pwutils.path.makePath(p)
-
-        self._loadHosts(hostsConf)
-
-    def _storeCreationTime(self, new=True):
-        """ Store the creation time in the project db. """
-        # Store creation time
-        self._creationTime.setName(PROJECT_CREATION_TIME)
-        self.mapper.store(self._creationTime)
-        self.mapper.commit()
-
-    def _cleanData(self):
-        """Clean all project data"""
-        pwutils.path.cleanPath(*self.pathList)
-
-    def _continueWorkflow(self, errorsList, continuedProtList=None):
-        """
-        This function continue a workflow from a selected protocol.
-        The previous results are preserved.
-        Actions done here are:
-        1. if the protocol list exists (for each protocol)
-            1.1 if the protocol is not an interactive protocol
-                1.1.1. If the protocol is in streaming (CONTINUE ACTION):
-                    - 'dataStreaming' parameter if the protocol is an import
-                      protocol
-                    - check if the __stepsCheck function exist and it's not
-                      the same implementation of the base class
-                      (worksInStreaming function)
-                    1.1.1.1 Open the protocol sets, store and save them in
-                            the database
-                    1.1.1.2 Change the protocol status (SAVED)
-                    1.1.1.3 Schedule the protocol
-                Else Restart the workflow from that point (RESTART ACTION) if
-                    at least one protocol in streaming has been launched
-        """
-        if continuedProtList is not None:
-            for protocol, level in continuedProtList.values():
-                if not protocol.isInteractive():
-                    if protocol.isScheduled():
-                        continue
-
-                    # streaming ...
-                    if protocol.worksInStreaming() and not protocol.isSaved():
-                        attrSet = [attr for name, attr in
-                                   protocol.iterOutputAttributes(pwprot.Set)]
-                        try:
-                            if attrSet:
-                                # Open output sets..
-                                for attr in attrSet:
-                                    attr.setStreamState(attr.STREAM_OPEN)
-                                    attr.write()
-                                    attr.close()
-                            protocol.setStatus(pwprot.STATUS_SAVED)
-                            protocol._updateSteps(lambda step: step.setStatus(pwprot.STATUS_SAVED))
-                            protocol.setMapper(self.createMapper(protocol.getDbPath()))
-                            protocol._store()
-                            self._storeProtocol(protocol)
-                            self.scheduleProtocol(protocol,
-                                                  initialSleepTime=level*INITIAL_SLEEP_TIME)
-                        except Exception as ex:
-                            errorsList.append("Error trying to launch the "
-                                              "protocol: %s\nERROR: %s\n" %
-                                              (protocol.getObjLabel(), ex))
-                            break
-                    else:
-                        if level != 0:
-                            # Not in streaming and not the first protocol.
-                            if protocol.isActive():
-                                self.stopProtocol(protocol)
-                            self._restartWorkflow(errorsList,{protocol.getObjId(): (protocol, level)})
-
-                        else: # First protocol not in streaming
-                            if not protocol.isActive():
-                                self.scheduleProtocol(protocol)
-
-
-
-    def _restartWorkflow(self, errorsList, restartedProtList=None):
-        """
-        This function restart a workflow from a selected protocol.
-        All previous results will be deleted
-        Actions done here are:
-        1. Set the protocol run mode (RESTART). All previous results will be
-           deleted
-        2. Schedule the protocol if not is an interactive protocol
-        3. For each of the dependents protocols, repeat from step 1
-        """
-        if restartedProtList is not None:
-            for protocol, level in restartedProtList.values():
-                if not protocol.isInteractive():
-                    try:
-                        if protocol.isScheduled():
-                            continue
-                        elif protocol.isActive():
-                            self.stopProtocol(protocol)
-                        protocol.runMode.set(MODE_RESTART)
-                        self.scheduleProtocol(protocol,
-                                              initialSleepTime=level*INITIAL_SLEEP_TIME)
-                    except Exception as ex:
-                        errorsList.append("Error trying to restart a protocol: %s"
-                                          "\nERROR: %s\n" % (protocol.getObjLabel(),
-                                                             ex))
-                        break
-                else:
-                    protocol.setStatus(pwprot.STATUS_SAVED)
-                    self._storeProtocol(protocol)
-                    protocol.runMode.set(MODE_RESTART)
-                    self._setupProtocol(protocol)
-                    protocol.makePathsAndClean() # Create working dir if necessary
-                    # Delete the relations created by this protocol
-                    self.mapper.deleteRelations(self)
-                    self.mapper.commit()
-                    self.mapper.store(protocol)
-                    self.mapper.commit()
-
-    def _fixProtParamsConfiguration(self, protocol=None):
-        """
-        This function fix:
-        1. The old parameters configuration in the protocols.
-        Now, dependent protocols have a pointer to the parent protocol, and
-        the extended parameter has a parent output value
-        """
-        # Take the old configuration attributes and fix the pointer
-        oldStylePointerList = [item for key, item in
-                               protocol.iterInputAttributes()
-                               if not isinstance(item.getObjValue(),
-                                                 pwprot.Protocol)]
-        if oldStylePointerList:
-            # Fix the protocol parameters
-            for pointer in oldStylePointerList:
-                auxPointer = pointer.getObjValue()
-                pointer.set(self.getRunsGraph().getNode(str(pointer.get().getObjParentId())).run)
-                pointer.setExtended(auxPointer.getLastName())
-                protocol._store()
-                self._storeProtocol(protocol)
-                self._updateProtocol(protocol)
-                self.mapper.commit()
-
-    def stopWorkFlow(self, activeProtList):
-        """
-        This function can stop a workflow from a selected protocol
-        :param initialProtocol: selected protocol
-        """
-        errorProtList = []
-        for protocol in activeProtList.values():
-            try:
-                self.stopProtocol(protocol)
-            except Exception:
-                errorProtList.append(protocol)
-        return errorProtList
-
-    def resetWorkFlow(self, workflowProtocolList):
-        """
-        This function can reset a workflow from a selected protocol
-        :param initialProtocol: selected protocol
-        """
-        errorProtList = []
-        if workflowProtocolList:
-            for protocol, level in workflowProtocolList.values():
-                if protocol.getStatus() != pwprot.STATUS_SAVED:
-                    try:
-                        self.resetProtocol(protocol)
-                    except Exception:
-                        errorProtList.append(protocol)
-        return errorProtList
-
-    def launchWorkflow(self, workflowProtocolList, mode=MODE_RESUME):
-        """
-        This function can launch a workflow from a selected protocol in two
-        modes depending on the 'mode' value (RESTART, CONTINUE)
-        Actions done here are:
-
-        1. Check if the workflow has active protocols.
-        2. Fix the workflow if is not properly configured
-        3. Restart or Continue a workflow starting from the protocol depending
-           on the 'mode' value
-
-        """
-        errorsList = []
-        if mode == MODE_RESTART:
-            self._restartWorkflow(errorsList, workflowProtocolList)
-        else:
-            self._continueWorkflow(errorsList,workflowProtocolList)
-        return errorsList
-
-    def launchProtocol(self, protocol, wait=False, scheduled=False,
-                       force=False):
-        """ In this function the action of launching a protocol
-        will be initiated. Actions done here are:
-
-        1. Store the protocol and assign name and working dir
-        2. Create the working dir and also the protocol independent db
-        3. Call the launch method in protocol.job to handle submission:
-           mpi, thread, queue.
-
-        If the protocol has some prerequisites (other protocols that
-        needs to be finished first), it will be scheduled.
-
-        :param protocol: Protocol instance to launch
-        :param wait: Optional. If true, this method
-            will wait until execution is finished. Used in tests.
-        :param scheduled: Optional. If true, run.db and paths
-            already exist and are preserved.
-        :param force: Optional. If true, launch is forced, regardless
-            latter dependent executions. Used when restarting many protocols a once.
-
-        """
-        if protocol.getPrerequisites() and not scheduled:
-            return self.scheduleProtocol(protocol)
-
-        isRestart = protocol.getRunMode() == MODE_RESTART
-
-        if not force:
-            if (not protocol.isInteractive() and not protocol.isInStreaming()) or isRestart:
-                self._checkModificationAllowed([protocol],
-                                               'Cannot RE-LAUNCH protocol')
-
-        protocol.setStatus(pwprot.STATUS_LAUNCHED)
-        self._setupProtocol(protocol)
-
-        # Prepare a separate db for this run if not from schedule jobs
-        # Scheduled protocols will load the project db from the run.db file,
-        # so there is no need to copy the database
-
-        if not scheduled:
-            protocol.makePathsAndClean() # Create working dir if necessary
-            # Delete the relations created by this protocol
-            if isRestart:
-                self.mapper.deleteRelations(self)
-                # Clean and persist execution attributes; otherwise, this would retain old job IDs and PIDs.
-                protocol.cleanExecutionAttributes()
-                protocol._store(protocol._jobId)
-
-            self.mapper.commit()
-
-            # NOTE: now we are simply copying the entire project db, this can be
-            # changed later to only create a subset of the db need for the run
-            pwutils.path.copyFile(self.dbPath, protocol.getDbPath())
-
-        # Launch the protocol; depending on the case, either the pId or the jobId will be set in this call
-        pwprot.launch(protocol, wait)
-
-        # Commit changes
-        if wait: # This is only useful for launching tests...
-            self._updateProtocol(protocol)
-        else:
-            self.mapper.store(protocol)
-        self.mapper.commit()
-
-    def scheduleProtocol(self, protocol, prerequisites=[], initialSleepTime=0):
-        """ Schedule a new protocol that will run when the input data
-        is available and the prerequisites are finished.
-
-        :param protocol: the protocol that will be scheduled.
-        :param prerequisites: a list with protocols ids that the scheduled
-            protocol will wait for.
-        :param initialSleepTime: number of seconds to wait before
-            checking input's availability
-
-        """
-        isRestart = protocol.getRunMode() == MODE_RESTART
-
-        protocol.setStatus(pwprot.STATUS_SCHEDULED)
-        protocol.addPrerequisites(*prerequisites)
-
-        self._setupProtocol(protocol)
-        protocol.makePathsAndClean() # Create working dir if necessary
-        # Delete the relations created by this protocol if any
-        if isRestart:
-            self.mapper.deleteRelations(self)
-        self.mapper.commit()
-
-        # Prepare a separate db for this run
-        # NOTE: now we are simply copying the entire project db, this can be
-        # changed later to only create a subset of the db need for the run
-        pwutils.path.copyFile(self.dbPath, protocol.getDbPath())
-        # Launch the protocol, the jobId should be set after this call
-        pwprot.schedule(protocol, initialSleepTime=initialSleepTime)
-        self.mapper.store(protocol)
-        self.mapper.commit()
-
-    def _updateProtocol(self, protocol: Protocol, tries=0, checkPid=False,
-                        skipUpdatedProtocols=True):
-
-        # If this is read only exit
-        if self.openedAsReadOnly():
-            return pw.NOT_UPDATED_READ_ONLY
-
-        try:
-
-            # Backup the values of 'jobId', 'label' and 'comment'
-            # to be restored after the .copy
-            jobId = protocol.getJobIds().clone() # Use clone to prevent this variable from being overwritten or cleared in the latter .copy() call
-            label = protocol.getObjLabel()
-            comment = protocol.getObjComment()
-
-            if skipUpdatedProtocols:
-                # If we are already updated, comparing timestamps
-                if pwprot.isProtocolUpToDate(protocol):
-
-                    # Always check for the status of the process (queue job or pid)
-                    self.checkIsAlive(protocol)
-                    return pw.NOT_UPDATED_UNNECESSARY
-
-
-            # If the protocol database has ....
-            # Comparing date will not work unless we have a reliable
-            # lastModificationDate of a protocol in the project.sqlite
-            prot2 = pwprot.getProtocolFromDb(self.path,
-                                             protocol.getDbPath(),
-                                             protocol.getObjId())
-
-            # Capture the db timestamp before loading.
-            lastUpdateTime = pwutils.getFileLastModificationDate(protocol.getDbPath())
-
-            # Copy is only working for db restored objects
-            protocol.setMapper(self.mapper)
-
-            localOutputs = list(protocol._outputs)
-            protocol.copy(prot2, copyId=False, excludeInputs=True)
-
-            # merge outputs: This is necessary when outputs are added from the GUI
-            # e.g.: adding coordinates from analyze result and protocol is active (interactive).
-            for attr in localOutputs:
-                if attr not in protocol._outputs:
-                    protocol._outputs.append(attr)
-
-            # Restore backup values
-            if protocol.useQueueForProtocol() and jobId: # If jobId not empty then restore value as the db is empty
-                # Case for direct protocol launch from the GUI. Without passing through a scheduling process.
-                # In this case the jobid is obtained by the GUI and the job id should be preserved.
-                protocol.setJobIds(jobId)
-
-            # In case of scheduling a protocol, the jobid is obtained during the "scheduling job"
-            # and it is written in the rub.db. Therefore, it should be taken from there.
-
-            protocol.setObjLabel(label)
-            protocol.setObjComment(comment)
-            # Use the run.db timestamp instead of the system TS to prevent
-            # possible inconsistencies.
-            protocol.lastUpdateTimeStamp.set(lastUpdateTime)
-
-            # Check pid at the end, once updated
-            if checkPid:
-                self.checkIsAlive(protocol)
-
-            self.mapper.store(protocol)
-
-            # Close DB connections
-            prot2.getProject().closeMapper()
-            prot2.closeMappers()
-
-        except Exception as ex:
-            if tries == 3: # 3 tries have been failed
-                traceback.print_exc()
-                # If any problem happens, the protocol will be marked
-                # with a FAILED status
-                try:
-                    protocol.setFailed(str(ex))
-                    self.mapper.store(protocol)
-                except Exception:
-                    pass
-                return pw.NOT_UPDATED_ERROR
-            else:
-                logger.warning("Couldn't update protocol %s(jobId=%s) from it's own database. ERROR: %s, attempt=%d"
-                               % (protocol.getObjName(), jobId, ex, tries))
-                time.sleep(0.5)
-                self._updateProtocol(protocol, tries + 1)
-
-        return pw.PROTOCOL_UPDATED
-
-    def checkIsAlive(self, protocol):
-        """ Check if a protocol is alive based on its jobid or pid"""
-        if protocol.getPid() == 0:
-            self.checkJobId(protocol)
-        else:
-            self.checkPid(protocol)
-
-    def stopProtocol(self, protocol):
-        """ Stop a running protocol """
-        try:
-            if protocol.getStatus() in ACTIVE_STATUS:
-                self._updateProtocol(protocol) # update protocol to have the latest rub.db values
-                pwprot.stop(protocol)
-        except Exception as e:
-            logger.error("Couldn't stop the protocol: %s" % e)
-            raise
-        finally:
-            protocol.setAborted()
-            protocol.setMapper(self.createMapper(protocol.getDbPath()))
-            protocol._store()
-            self._storeProtocol(protocol)
-            protocol.getMapper().close()
-
-    def resetProtocol(self, protocol):
-        """ Stop a running protocol """
-        try:
-            if protocol.getStatus() in ACTIVE_STATUS:
-                pwprot.stop(protocol)
-        except Exception:
-            raise
-        finally:
-            protocol.setSaved()
-            protocol.runMode.set(MODE_RESTART)
-            protocol.makePathsAndClean() # Create working dir if necessary
-            # Clean jobIds, Pid and StepsDone;
-            protocol.cleanExecutionAttributes() # otherwise, this would retain old executions info
-            protocol._store()
-
-    def continueProtocol(self, protocol):
-        """ This function should be called
-        to mark a protocol that have an interactive step
-        waiting for approval that can continue
-        """
-        protocol.continueFromInteractive()
-        self.launchProtocol(protocol)
-
-    def __protocolInList(self, prot, protocols):
-        """ Check if a protocol is in a list comparing the ids. """
-        for p in protocols:
-            if p.getObjId() == prot.getObjId():
-                return True
-        return False
-
-    def __validDependency(self, prot, child, protocols):
-        """ Check if the given child is a true dependency of the protocol
-        in order to avoid any modification.
-        """
-        return (not self.__protocolInList(child, protocols) and
-                not child.isSaved() and not child.isScheduled())
-
-    def _getProtocolsDependencies(self, protocols):
-        error = ''
-        runsGraph = self.getRunsGraph()
-        for prot in protocols:
-            node = runsGraph.getNode(prot.strId())
-            if node:
-                childs = [node.run for node in node.getChildren() if
-                          self.__validDependency(prot, node.run, protocols)]
-                if childs:
-                    deps = [' ' + c.getRunName() for c in childs]
-                    error += '\n *%s* is referenced from:\n - ' % prot.getRunName()
-                    error += '\n - '.join(deps)
-        return error
-
-    def _getProtocolDescendents(self, protocol):
-        """Getting the descendents protocols from a given one"""
-        runsGraph = self.getRunsGraph()
-        visitedNodes = dict()
-        node = runsGraph.getNode(protocol.strId())
-        if node is None:
-            return visitedNodes
-
-        visitedNodes[int(node.getName())] = node
-
-        def getDescendents(rootNode):
-            for child in rootNode.getChildren():
-                if int(child.getName()) not in visitedNodes:
-                    visitedNodes[int(child.getName())] = child
-                    getDescendents(child)
-
-        getDescendents(node)
-        return visitedNodes
-
-    def getProtocolCompatibleOutputs(self, protocol, classes, condition):
-        """Getting the outputs compatible with an object type. The outputs of the child protocols are excluded. """
-        objects = []
-        maxNum = 200
-        protocolDescendents = self._getProtocolDescendents(protocol)
-        runs = self.getRuns(refresh=False)
-
-        for prot in runs:
-            # Make sure we don't include previous output of the same
-            # and other descendent protocols
-            if prot.getObjId() not in protocolDescendents:
-                # Check if the protocol itself is one of the desired classes
-                if any(issubclass(prot.getClass(), c) for c in classes):
-                    p = pwobj.Pointer(prot)
-                    objects.append(p)
-
-                try:
-                    # paramName and attr must be set to None
-                    # Otherwise, if a protocol has failed and the corresponding output object of type XX does not exist
-                    # any other protocol that uses objects of type XX as input will not be able to choose then using
-                    # the magnifier glass (object selector of type XX)
-                    paramName = None
-                    attr = None
-                    for paramName, attr in prot.iterOutputAttributes(includePossible=True):
-                        def _checkParam(paramName, attr):
-                            # If attr is a subclasses of any desired one, add it to the list
-                            # we should also check if there is a condition, the object
-                            # must comply with the condition
-                            p = None
-
-                            match = False
-                            cancelConditionEval = False
-                            possibleOutput = isinstance(attr, type)
-
-                            # Go through all compatible Classes coming from in pointerClass string
-                            for c in classes:
-                                # If attr is an instance
-                                if isinstance(attr, c):
-                                    match = True
-                                    break
-                                # If it is a class already: "possibleOutput" case. In this case attr is the class and not
-                                # an instance of c. In this special case
-                                elif possibleOutput and attr == c:
-                                    match = True
-                                    cancelConditionEval = True
-
-                            # If attr matches the class
-                            if match:
-                                if cancelConditionEval or not condition or attr.evalCondition(condition):
-                                    p = pwobj.Pointer(prot, extended=paramName)
-                                    p._allowsSelection = True
-                                    objects.append(p)
-                                    return
-
-                            # JMRT: For all sets, we don't want to include the
-                            # subitems here for performance reasons (e.g. SetOfParticles)
-                            # Thus, a Set class can define EXPOSE_ITEMS = True
-                            # to enable the inclusion of its items here
-                            if getattr(attr, 'EXPOSE_ITEMS', False) and not possibleOutput:
-                                # If the ITEM type match any of the desired classes
-                                # we will add some elements from the set
-                                if (attr.ITEM_TYPE is not None and
-                                        any(issubclass(attr.ITEM_TYPE, c) for c in classes)):
-                                    if p is None: # This means the set have not be added
-                                        p = pwobj.Pointer(prot, extended=paramName)
-                                        p._allowsSelection = False
-                                        objects.append(p)
-                                    # Add each item on the set to the list of objects
-                                    try:
-                                        for i, item in enumerate(attr):
-                                            if i == maxNum: # Only load up to NUM particles
-                                                break
-                                            pi = pwobj.Pointer(prot, extended=paramName)
-                                            pi.addExtended(item.getObjId())
-                                            pi._parentObject = p
-                                            objects.append(pi)
-                                    except Exception as ex:
-                                        print("Error loading items from:")
-                                        print(" protocol: %s, attribute: %s" % (prot.getRunName(), paramName))
-                                        print(" dbfile: ", os.path.join(self.getPath(), attr.getFileName()))
-                                        print(ex)
-
-                        _checkParam(paramName, attr)
-                        # The following is a dirty fix for the RCT case where there
-                        # are inner output, maybe we should consider extend this for
-                        # in a more general manner
-                        for subParam in ['_untilted', '_tilted']:
-                            if hasattr(attr, subParam):
-                                _checkParam('%s.%s' % (paramName, subParam),
-                                            getattr(attr, subParam))
-                except Exception as e:
-                    print("Cannot read attributes for %s (%s)" % (prot.getClass(), e))
-
-        return objects
-
-    def _checkProtocolsDependencies(self, protocols, msg):
-        """ Check if the protocols have dependencies.
-        This method is used before delete or save protocols to be sure
-        it is not referenced from other runs. (an Exception is raised)
-        Params:
-            protocols: protocol list to be analyzed.
-            msg: String message to be prefixed to Exception error.
-        """
-        # Check if the protocol have any dependencies
-        error = self._getProtocolsDependencies(protocols)
-        if error:
-            raise ModificationNotAllowedException(msg + error)
-
-    def _checkModificationAllowed(self, protocols, msg):
-        """ Check if any modification operation is allowed for
-        this group of protocols.
-        """
-        if self.openedAsReadOnly():
-            raise Exception(msg + " Running in READ-ONLY mode.")
-
-        self._checkProtocolsDependencies(protocols, msg)
-
-    def _getSubworkflow(self, protocol, fixProtParam=True, getStopped=True):
-        """
-        This function get the workflow from "protocol" and determine the
-        protocol level into the graph. Also, checks if there are active
-        protocols excluding interactive protocols.
-        :param protocol from where to start the subworkflow (included)
-        :param fixProtParam fix the old parameters configuration in the protocols
-        :param getStopped takes into account protocols that aren't stopped
-        """
-        affectedProtocols = {}
-        affectedProtocolsActive = {}
-        auxProtList = []
-        # store the protocol and your level into the workflow
-        affectedProtocols[protocol.getObjId()] = [protocol, 0]
-        auxProtList.append([protocol.getObjId(), 0])
-        runGraph = self.getRunsGraph()
-
-        while auxProtList:
-            protId, level = auxProtList.pop(0)
-            protocol = runGraph.getNode(str(protId)).run
-
-            # Increase the level for the children
-            level = level + 1
-
-            if fixProtParam:
-                self._fixProtParamsConfiguration(protocol)
-
-            if not getStopped and protocol.isActive():
-                affectedProtocolsActive[protocol.getObjId()] = protocol
-            elif not protocol.getObjId() in affectedProtocolsActive.keys() and getStopped and \
-                    not protocol.isSaved() and protocol.getStatus() != STATUS_INTERACTIVE:
-                affectedProtocolsActive[protocol.getObjId()] = protocol
-
-            node = runGraph.getNode(protocol.strId())
-            dependencies = [node.run for node in node.getChildren()]
-            for dep in dependencies:
-                if not dep.getObjId() in auxProtList:
-                    auxProtList.append([dep.getObjId(), level])
-
-                if not dep.getObjId() in affectedProtocols.keys():
-                    affectedProtocols[dep.getObjId()] = [dep, level]
-                elif level > affectedProtocols[dep.getObjId()][1]:
-                    affectedProtocols[dep.getObjId()][1] = level
-
-        return affectedProtocols, affectedProtocolsActive
-
-    def deleteProtocol(self, *protocols):
-        self._checkModificationAllowed(protocols, 'Cannot DELETE protocols')
-
-        for prot in protocols:
-            # Delete the relations created by this protocol
-            self.mapper.deleteRelations(prot)
-            # Delete from protocol from database
-            self.mapper.delete(prot)
-            wd = prot.workingDir.get()
-
-            if wd.startswith(PROJECT_RUNS):
-                prot.cleanWorkingDir()
-            else:
-                logger.info("Can't delete protocol %s. Its workingDir %s does not starts with %s " % (prot, wd, PROJECT_RUNS))
-
-        self.mapper.commit()
-
-    def deleteProtocolOutput(self, protocol, output):
-        """ Delete a given object from the project.
-        Usually to clean up some outputs.
-        """
-        node = self.getRunsGraph().getNode(protocol.strId())
-        deps = []
-
-        for node in node.getChildren():
-            for _, inputObj in node.run.iterInputAttributes():
-                value = inputObj.get()
-                if (value is not None and
-                        value.getObjId() == output.getObjId() and
-                        not node.run.isSaved()):
-                    deps.append(node.run)
-
-        if deps:
-            error = 'Cannot DELETE Object, it is referenced from:'
-            for d in deps:
-                error += '\n - %s' % d.getRunName()
-            raise Exception(error)
-        else:
-            protocol.deleteOutput(output)
|
1071
|
-
pwutils.path.copyFile(self.dbPath, protocol.getDbPath())
|
1072
|
-
|
1073
|
-
def __setProtocolLabel(self, newProt):
|
1074
|
-
""" Set a readable label to a newly created protocol.
|
1075
|
-
We will try to find another existing protocol with the default label
|
1076
|
-
and then use an incremental labeling in parenthesis (<number>++)
|
1077
|
-
"""
|
1078
|
-
defaultLabel = newProt.getClassLabel()
|
1079
|
-
maxSuffix = 0
|
1080
|
-
|
1081
|
-
for prot in self.getRuns(iterate=True, refresh=False):
|
1082
|
-
otherProtLabel = prot.getObjLabel()
|
1083
|
-
m = REGEX_NUMBER_ENDING.match(otherProtLabel)
|
1084
|
-
if m and m.groupdict()['prefix'].strip() == defaultLabel:
|
1085
|
-
stringSuffix = m.groupdict()['number'].strip('(').strip(')')
|
1086
|
-
try:
|
1087
|
-
maxSuffix = max(int(stringSuffix), maxSuffix)
|
1088
|
-
except:
|
1089
|
-
logger.error("Couldn't set protocol's label. %s" % stringSuffix)
|
1090
|
-
elif otherProtLabel == defaultLabel: # When only we have the prefix,
|
1091
|
-
maxSuffix = max(1, maxSuffix) # this REGEX don't match.
|
1092
|
-
|
1093
|
-
if maxSuffix:
|
1094
|
-
protLabel = '%s (%d)' % (defaultLabel, maxSuffix+1)
|
1095
|
-
else:
|
1096
|
-
protLabel = defaultLabel
|
1097
|
-
|
1098
|
-
newProt.setObjLabel(protLabel)
|
1099
|
-
|
1100
|
-
def newProtocol(self, protocolClass, **kwargs):
|
1101
|
-
""" Create a new protocol from a given class. """
|
1102
|
-
newProt = protocolClass(project=self, **kwargs)
|
1103
|
-
# Only set a default label to the protocol if is was not
|
1104
|
-
# set through the kwargs
|
1105
|
-
if not newProt.getObjLabel():
|
1106
|
-
self.__setProtocolLabel(newProt)
|
1107
|
-
|
1108
|
-
newProt.setMapper(self.mapper)
|
1109
|
-
newProt.setProject(self)
|
1110
|
-
|
1111
|
-
return newProt
|
1112
|
-
|
1113
|
-
def __getIOMatches(self, node, childNode):
|
1114
|
-
""" Check if some output of node is used as input in childNode.
|
1115
|
-
Return the list of attribute names that matches.
|
1116
|
-
Used from self.copyProtocol
|
1117
|
-
"""
|
1118
|
-
matches = []
|
1119
|
-
for iKey, iAttr in childNode.run.iterInputAttributes():
|
1120
|
-
# As this point iAttr should be always a Pointer that
|
1121
|
-
# points to the output of other protocol
|
1122
|
-
if iAttr.getObjValue() is node.run:
|
1123
|
-
oKey = iAttr.getExtended()
|
1124
|
-
matches.append((oKey, iKey))
|
1125
|
-
else:
|
1126
|
-
for oKey, oAttr in node.run.iterOutputAttributes():
|
1127
|
-
# If node output is "real" and iAttr is still just a pointer
|
1128
|
-
# the iAttr.get() will return None
|
1129
|
-
pointed = iAttr.get()
|
1130
|
-
if pointed is not None and oAttr.getObjId() == pointed.getObjId():
|
1131
|
-
matches.append((oKey, iKey))
|
1132
|
-
|
1133
|
-
return matches
|
1134
|
-
|
1135
|
-
def __cloneProtocol(self, protocol):
|
1136
|
-
""" Make a copy of the protocol parameters, not outputs.
|
1137
|
-
We will label the new protocol with the same name adding the
|
1138
|
-
parenthesis as follow -> (copy) -> (copy 2) -> (copy 3)
|
1139
|
-
"""
|
1140
|
-
newProt = self.newProtocol(protocol.getClass())
|
1141
|
-
oldProtName = protocol.getRunName()
|
1142
|
-
maxSuffix = 0
|
1143
|
-
|
1144
|
-
# if '(copy...' suffix is not in the old name, we add it in the new name
|
1145
|
-
# and setting the newnumber
|
1146
|
-
mOld = REGEX_NUMBER_ENDING_CP.match(oldProtName)
|
1147
|
-
if mOld:
|
1148
|
-
newProtPrefix = mOld.groupdict()['prefix']
|
1149
|
-
if mOld.groupdict()['number'] == '':
|
1150
|
-
oldNumber = 1
|
1151
|
-
else:
|
1152
|
-
oldNumber = int(mOld.groupdict()['number'])
|
1153
|
-
else:
|
1154
|
-
newProtPrefix = oldProtName + ' (copy'
|
1155
|
-
oldNumber = 0
|
1156
|
-
newNumber = oldNumber + 1
|
1157
|
-
|
1158
|
-
# looking for "<old name> (copy" prefixes in the project and
|
1159
|
-
# setting the newNumber as the maximum+1
|
1160
|
-
for prot in self.getRuns(iterate=True, refresh=False):
|
1161
|
-
otherProtLabel = prot.getObjLabel()
|
1162
|
-
mOther = REGEX_NUMBER_ENDING_CP.match(otherProtLabel)
|
1163
|
-
if mOther and mOther.groupdict()['prefix'] == newProtPrefix:
|
1164
|
-
stringSuffix = mOther.groupdict()['number']
|
1165
|
-
if stringSuffix == '':
|
1166
|
-
stringSuffix = 1
|
1167
|
-
maxSuffix = max(maxSuffix, int(stringSuffix))
|
1168
|
-
if newNumber <= maxSuffix:
|
1169
|
-
newNumber = maxSuffix + 1
|
1170
|
-
|
1171
|
-
# building the new name
|
1172
|
-
if newNumber == 1:
|
1173
|
-
newProtLabel = newProtPrefix + ')'
|
1174
|
-
else:
|
1175
|
-
newProtLabel = '%s %d)' % (newProtPrefix, newNumber)
|
1176
|
-
|
1177
|
-
newProt.setObjLabel(newProtLabel)
|
1178
|
-
newProt.copyDefinitionAttributes(protocol)
|
1179
|
-
newProt.copyAttributes(protocol, 'hostName', '_useQueue', '_queueParams')
|
1180
|
-
newProt.runMode.set(MODE_RESTART)
|
1181
|
-
newProt.cleanExecutionAttributes() # Clean jobIds and Pid; otherwise, this would retain old job IDs and PIDs.
|
1182
|
-
|
1183
|
-
return newProt
|
1184
|
-
|
-    def copyProtocol(self, protocol):
-        """ Make a copy of the protocol,
-        Return a new instance with copied values. """
-        result = None
-
-        if isinstance(protocol, pwprot.Protocol):
-            result = self.__cloneProtocol(protocol)
-
-        elif isinstance(protocol, list):
-            # Handle the copy of a list of protocols
-            # for this case we need to update the references of input/outputs
-            newDict = {}
-
-            for prot in protocol:
-                newProt = self.__cloneProtocol(prot)
-                newDict[prot.getObjId()] = newProt
-                self.saveProtocol(newProt)
-
-            g = self.getRunsGraph()
-
-            for prot in protocol:
-                node = g.getNode(prot.strId())
-                newProt = newDict[prot.getObjId()]
-
-                for childNode in node.getChildren():
-                    newChildProt = newDict.get(childNode.run.getObjId(), None)
-
-                    if newChildProt:
-                        # Get the matches between outputs/inputs of
-                        # node and childNode
-                        matches = self.__getIOMatches(node, childNode)
-                        # For each match, set the pointer and the extend
-                        # attribute to reproduce the dependencies in the
-                        # new workflow
-                        for oKey, iKey in matches:
-                            childPointer = getattr(newChildProt, iKey)
-
-                            # Scalar with pointer case: If is a scalar with a pointer
-                            if isinstance(childPointer, pwobj.Scalar) and childPointer.hasPointer():
-                                # In this case childPointer becomes the contained Pointer
-                                childPointer = childPointer.getPointer()
-
-                            elif isinstance(childPointer, pwobj.PointerList):
-                                for p in childPointer:
-                                    if p.getObjValue().getObjId() == prot.getObjId():
-                                        childPointer = p
-                            childPointer.set(newProt)
-                            childPointer.setExtended(oKey)
-                        self.mapper.store(newChildProt)
-
-            self.mapper.commit()
-        else:
-            raise Exception("Project.copyProtocol: invalid input protocol ' "
-                            "'type '%s'." % type(protocol))
-
-        return result
-
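
A short usage sketch for copyProtocol, assuming a hypothetical loaded Project instance named project and two runs prot1 and prot2 where prot2 consumes an output of prot1:

# Single protocol: a new, unsaved clone is returned; save it explicitly.
newProt = project.copyProtocol(prot1)
project.saveProtocol(newProt)

# List of protocols: the copies are saved inside copyProtocol and their
# input pointers are re-wired to the new clones; the return value is None.
project.copyProtocol([prot1, prot2])
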
-    def getProtocolsDict(self, protocols=None, namesOnly=False):
-        """ Creates a dict with the information of the given protocols.
-
-        :param protocols: list of protocols or None to include all.
-        :param namesOnly: the output list will contain only the protocol names.
-
-        """
-        protocols = protocols or self.getRuns()
-
-        # If the nameOnly, we will simply return a json list with their names
-        if namesOnly:
-            return {i: prot.getClassName() for i, prot in enumerate(protocols)}
-
-        # Handle the copy of a list of protocols
-        # for this case we need to update the references of input/outputs
-        newDict = OrderedDict()
-
-        for prot in protocols:
-            newDict[prot.getObjId()] = prot.getDefinitionDict()
-
-        g = self.getRunsGraph()
-
-        for prot in protocols:
-            protId = prot.getObjId()
-            node = g.getNode(prot.strId())
-
-            for childNode in node.getChildren():
-                childId = childNode.run.getObjId()
-                childProt = childNode.run
-                if childId in newDict:
-                    childDict = newDict[childId]
-                    # Get the matches between outputs/inputs of
-                    # node and childNode
-                    matches = self.__getIOMatches(node, childNode)
-                    for oKey, iKey in matches:
-                        inputAttr = getattr(childProt, iKey)
-                        if isinstance(inputAttr, pwobj.PointerList):
-                            childDict[iKey] = [p.getUniqueId() for p in
-                                               inputAttr]
-                        else:
-                            childDict[iKey] = '%s.%s' % (
-                                protId, oKey) # equivalent to pointer.getUniqueId
-
-        return newDict
-
-    def getProtocolsJson(self, protocols=None, namesOnly=False):
-        """
-        Wraps getProtocolsDict to get a json string
-
-        :param protocols: list of protocols or None to include all.
-        :param namesOnly: the output list will contain only the protocol names.
-
-        """
-        newDict = self.getProtocolsDict(protocols=protocols, namesOnly=namesOnly)
-        return json.dumps(list(newDict.values()),
-                          indent=4, separators=(',', ': '))
-
-    def exportProtocols(self, protocols, filename):
-        """ Create a text json file with the info
-        to import the workflow into another project.
-        This method is very similar to copyProtocol
-
-        :param protocols: a list of protocols to export.
-        :param filename: the filename where to write the workflow.
-
-        """
-        jsonStr = self.getProtocolsJson(protocols)
-        f = open(filename, 'w')
-        f.write(jsonStr)
-        f.close()
-
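
A brief usage sketch of the export path, assuming a hypothetical loaded Project instance named project:

# Write the whole project as a reusable workflow template.
project.exportProtocols(project.getRuns(), 'workflow.json')

# Or just look at the protocol class names without writing a file.
print(project.getProtocolsJson(namesOnly=True))
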
-    def loadProtocols(self, filename=None, jsonStr=None):
-        """ Load protocols generated in the same format as self.exportProtocols.
-
-        :param filename: the path of the file where to read the workflow.
-        :param jsonStr:
-
-        Note: either filename or jsonStr should be not None.
-
-        """
-        importDir = None
-        if filename:
-            with open(filename) as f:
-                importDir = os.path.dirname(filename)
-                protocolsList = json.load(f)
-
-        elif jsonStr:
-            protocolsList = json.loads(jsonStr)
-        else:
-            logger.error("Invalid call to loadProtocols. Either filename or jsonStr has to be passed.")
-            return
-
-        emProtocols = self._domain.getProtocols()
-        newDict = OrderedDict()
-
-        # First iteration: create all protocols and setup parameters
-        for i, protDict in enumerate(protocolsList):
-            protClassName = protDict['object.className']
-            protId = protDict['object.id']
-            protClass = emProtocols.get(protClassName, None)
-
-            if protClass is None:
-                logger.error("Protocol with class name '%s' not found. Are you missing its plugin?." % protClassName)
-            else:
-                protLabel = protDict.get('object.label', None)
-                prot = self.newProtocol(protClass,
-                                        objLabel=protLabel,
-                                        objComment=protDict.get('object.comment', None))
-                protocolsList[i] = prot.processImportDict(protDict, importDir) if importDir else protDict
-
-                prot._useQueue.set(protDict.get('_useQueue', pw.Config.SCIPION_USE_QUEUE))
-                prot._queueParams.set(protDict.get('_queueParams', None))
-                prot._prerequisites.set(protDict.get('_prerequisites', None))
-                prot.forceSchedule.set(protDict.get('forceSchedule', False))
-                newDict[protId] = prot
-                # This saves the protocol JUST with the common attributes. Is it necessary?
-                # Actually, if after this the is an error, the protocol appears.
-                self.saveProtocol(prot)
-
-        # Second iteration: update pointers values
-        def _setPointer(pointer, value):
-            # Properly setup the pointer value checking if the
-            # id is already present in the dictionary
-            # Value to pointers could be None: Partial workflows
-            if value:
-                parts = value.split('.')
-
-                protId = parts[0]
-                # Try to get the protocol holding the input form the dictionary
-                target = newDict.get(protId, None)
-
-                if target is None:
-                    # Try to use existing protocol in the project
-                    logger.info("Protocol identifier (%s) not self contained. Looking for it in the project." % protId)
-
-                    try:
-                        target = self.getProtocol(int(protId), fromRuns=True)
-                    except:
-                        # Not a protocol..
-                        logger.info("%s is not a protocol identifier. Probably a direct pointer created by tests. This case is not considered." % protId)
-
-                    if target:
-                        logger.info("Linking %s to existing protocol in the project: %s" % (prot, target))
-
-                pointer.set(target)
-                if not pointer.pointsNone():
-                    pointer.setExtendedParts(parts[1:])
-
-        def _setPrerequisites(prot):
-            prerequisites = prot.getPrerequisites()
-            if prerequisites:
-                newPrerequisites = []
-                for prerequisite in prerequisites:
-                    if prerequisite in newDict:
-                        newProtId = newDict[prerequisite].getObjId()
-                        newPrerequisites.append(newProtId)
-                    else:
-                        logger.info('"Wait for" id %s missing: ignored.' % prerequisite)
-                prot._prerequisites.set(newPrerequisites)
-
-        for protDict in protocolsList:
-            protId = protDict['object.id']
-
-            if protId in newDict:
-                prot = newDict[protId]
-                _setPrerequisites(prot)
-                for paramName, attr in prot.iterDefinitionAttributes():
-                    if paramName in protDict:
-                        # If the attribute is a pointer, we should look
-                        # if the id is already in the dictionary and
-                        # set the extended property
-                        if attr.isPointer():
-                            _setPointer(attr, protDict[paramName])
-                        # This case is similar to Pointer, but the values
-                        # is a list and we will setup a pointer for each value
-                        elif isinstance(attr, pwobj.PointerList):
-                            attribute = protDict[paramName]
-                            if attribute is None:
-                                continue
-                            for value in attribute:
-                                p = pwobj.Pointer()
-                                _setPointer(p, value)
-                                attr.append(p)
-                        # For "normal" parameters we just set the string value
-                        else:
-                            try:
-                                attr.set(protDict[paramName])
-                            # Case for Scalars with pointers. So far this will work for Numbers. With Strings (still there are no current examples)
-                            # We will need something different to test if the value look like a pointer: regex? ####.text
-                            except ValueError as e:
-                                newPointer = pwobj.Pointer()
-                                _setPointer(newPointer, protDict[paramName])
-                                attr.setPointer(newPointer)
-
-                self.mapper.store(prot)
-
-        self.mapper.commit()
-
-        return newDict
-
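
The matching import side, assuming a hypothetical project and a previously exported 'workflow.json':

# Re-create the exported runs; returns an OrderedDict mapping the
# original protocol ids to the newly created protocol instances.
created = project.loadProtocols(filename='workflow.json')
for oldId, prot in created.items():
    print(oldId, '->', prot.getObjId(), prot.getClassName())
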
-    def saveProtocol(self, protocol):
-        self._checkModificationAllowed([protocol], 'Cannot SAVE protocol')
-
-        if (protocol.isRunning() or protocol.isFinished()
-                or protocol.isLaunched()):
-            raise ModificationNotAllowedException('Cannot SAVE a protocol that is %s. '
-                                                  'Copy it instead.' % protocol.getStatus())
-
-        protocol.setStatus(pwprot.STATUS_SAVED)
-        if protocol.hasObjId():
-            self._storeProtocol(protocol)
-        else:
-            self._setupProtocol(protocol)
-
-    def getProtocolFromRuns(self, protId):
-        """ Returns the protocol with the id=protId from the runs list (memory) or None"""
-        if self.runs:
-            for run in self.runs:
-                if run.getObjId() == protId:
-                    return run
-
-        return None
-
-    def getProtocol(self, protId, fromRuns=False):
-        """ Returns the protocol with the id=protId or raises an Exception
-
-        :param protId: integer with an existing protocol identifier
-        :param fromRuns: If true, it tries to get it from the runs list (memory) avoiding querying the db."""
-
-        protocol = self.getProtocolFromRuns(protId) if fromRuns else None
-
-        if protocol is None:
-            protocol = self.mapper.selectById(protId)
-
-        if not isinstance(protocol, pwprot.Protocol):
-            raise Exception('>>> ERROR: Invalid protocol id: %d' % protId)
-
-        self._setProtocolMapper(protocol)
-
-        return protocol
-
-    # FIXME: this function just return if a given object exists, not
-    # if it is a protocol, so it is incorrect judging by the name
-    # Moreover, a more consistent name (comparing to similar methods)
-    # would be: hasProtocol
-    def doesProtocolExists(self, protId):
-        return self.mapper.exists(protId)
-
-    def getProtocolsByClass(self, className):
-        return self.mapper.selectByClass(className)
-
-    def getObject(self, objId):
-        """ Retrieve an object from the db given its id. """
-        return self.mapper.selectById(objId)
-
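
A minimal lookup sketch, assuming a hypothetical project and an existing run id 123:

# Prefer the in-memory runs list, fall back to the database mapper.
prot = project.getProtocol(123, fromRuns=True)
print(prot.getRunName(), prot.getStatus())

# Despite its name, doesProtocolExists checks any stored object id.
if project.doesProtocolExists(123):
    obj = project.getObject(123)
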
-    def _setHostConfig(self, protocol):
-        """ Set the appropriate host config to the protocol
-        give its value of 'hostname'
-        """
-        hostName = protocol.getHostName()
-        hostConfig = self.getHostConfig(hostName)
-        protocol.setHostConfig(hostConfig)
-
-    def _storeProtocol(self, protocol):
-        # Read only mode
-        if not self.openedAsReadOnly():
-            self.mapper.store(protocol)
-            self.mapper.commit()
-
-    def _setProtocolMapper(self, protocol):
-        """ Set the project and mapper to the protocol. """
-
-        # Tolerate loading errors. For support.
-        # When only having the sqlite, sometime there are exceptions here
-        # due to the absence of a set.
-        from pyworkflow.mapper.sqlite import SqliteFlatMapperException
-        try:
-
-            protocol.setProject(self)
-            protocol.setMapper(self.mapper)
-            self._setHostConfig(protocol)
-
-        except SqliteFlatMapperException:
-            protocol.addSummaryWarning(
-                "*Protocol loading problem*: A set related to this "
-                "protocol couldn't be loaded.")
-
-    def _setupProtocol(self, protocol):
-        """Insert a new protocol instance in the database"""
-
-        # Read only mode
-        if not self.openedAsReadOnly():
-            self._storeProtocol(protocol) # Store first to get a proper id
-            # Set important properties of the protocol
-            workingDir = self.getProtWorkingDir(protocol)
-            self._setProtocolMapper(protocol)
-
-            protocol.setWorkingDir(self.getPath(PROJECT_RUNS, workingDir))
-            # Update with changes
-            self._storeProtocol(protocol)
-
-    @staticmethod
-    def getProtWorkingDir(protocol):
-        """
-        Return the protocol working directory
-        """
-        return "%06d_%s" % (protocol.getObjId(), protocol.getClassName())
-
-    def getRuns(self, iterate=False, refresh=True, checkPids=False):
-        """ Return the existing protocol runs in the project.
-        """
-        if self.runs is None or refresh:
-            # Close db open connections to db files
-            if self.runs is not None:
-                for r in self.runs:
-                    r.closeMappers()
-
-            # Use new selectAll Batch
-            # self.runs = self.mapper.selectAll(iterate=False,
-            #                                   objectFilter=lambda o: isinstance(o, pwprot.Protocol))
-            self.runs = self.mapper.selectAllBatch(objectFilter=lambda o: isinstance(o, pwprot.Protocol))
-
-            # Invalidate _runsGraph because the runs are updated
-            self._runsGraph = None
-
-            for r in self.runs:
-
-                self._setProtocolMapper(r)
-
-                # Check for run warnings
-                r.checkSummaryWarnings()
-
-                # Update nodes that are running and were not invoked
-                # by other protocols
-                if r.isActive():
-                    if not r.isChild():
-                        self._updateProtocol(r, checkPid=checkPids)
-
-                self._annotateLastRunTime(r.endTime)
-
-            self.mapper.commit()
-
-        return self.runs
-
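
A short iteration sketch over the runs, assuming a hypothetical project:

# Refresh from runs.db and report the state of every run.
for run in project.getRuns(refresh=True):
    print(run.getObjId(), run.getRunName(), run.getStatus())
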
-    def _annotateLastRunTime(self, protLastTS):
-        """ Sets _lastRunTime for the project if it is after current _lastRunTime"""
-        try:
-            if protLastTS is None:
-                return
-
-            if self._lastRunTime is None:
-                self._lastRunTime = protLastTS
-            elif self._lastRunTime.datetime() < protLastTS.datetime():
-                self._lastRunTime = protLastTS
-        except Exception as e:
-            return
-
-    def needRefresh(self):
-        """ True if any run is active and its timestamp is older than its
-        corresponding runs.db
-        NOTE: If an external script changes the DB this will fail. It uses
-        only in memory objects."""
-        for run in self.runs:
-            if run.isActive():
-                if not pwprot.isProtocolUpToDate(run):
-                    return True
-        return False
-
-    def checkPid(self, protocol):
-        """ Check if a running protocol is still alive or not.
-        The check will only be done for protocols that have not been sent
-        to a queue system.
-        """
-        from pyworkflow.protocol.launch import _runsLocally
-        pid = protocol.getPid()
-
-        if pid == 0:
-            return
-
-        # Include running and scheduling ones
-        # Exclude interactive protocols
-        # NOTE: This may be happening even with successfully finished protocols
-        # which PID is gone.
-        if (protocol.isActive() and not protocol.isInteractive() and _runsLocally(protocol)
-                and not pwutils.isProcessAlive(pid)):
-            protocol.setFailed("Process %s not found running on the machine. "
-                               "It probably has died or been killed without "
-                               "reporting the status to Scipion. Logs might "
-                               "have information about what happened to this "
-                               "process." % pid)
-
-    def checkJobId(self, protocol):
-        """ Check if a running protocol is still alive or not.
-        The check will only be done for protocols that have been sent
-        to a queue system.
-        """
-
-        if len(protocol.getJobIds()) == 0:
-            return
-
-        jobid = protocol.getJobIds()[0]
-        hostConfig = protocol.getHostConfig()
-
-        if jobid == UNKNOWN_JOBID:
-            return
-
-        # Include running and scheduling ones
-        # Exclude interactive protocols
-        # NOTE: This may be happening even with successfully finished protocols
-        # which PID is gone.
-        if protocol.isActive() and not protocol.isInteractive():
-
-            jobStatus = _checkJobStatus(hostConfig, jobid)
-
-            if jobStatus == STATUS_FINISHED:
-                protocol.setFailed("Process %s not found running on the machine. "
-                                   "It probably has died or been killed without "
-                                   "reporting the status to Scipion. Logs might "
-                                   "have information about what happened to this "
-                                   "process." % jobid)
-
-    def iterSubclasses(self, classesName, objectFilter=None):
-        """ Retrieve all objects from the project that are instances
-            of any of the classes in classesName list.
-        Params:
-            classesName: String with commas separated values of classes name.
-            objectFilter: a filter function to discard some of the retrieved
-            objects."""
-        for objClass in classesName.split(","):
-            for obj in self.mapper.selectByClass(objClass.strip(), iterate=True,
-                                                 objectFilter=objectFilter):
-                yield obj
-
-    def getRunsGraph(self, refresh=False, checkPids=False):
-        """ Build a graph taking into account the dependencies between
-        different runs, ie. which outputs serves as inputs of other protocols.
-        """
-
-        if refresh or self._runsGraph is None:
-            runs = [r for r in self.getRuns(refresh=refresh, checkPids=checkPids)
-                    if not r.isChild()]
-            self._runsGraph = self.getGraphFromRuns(runs)
-
-        return self._runsGraph
-
-    def getGraphFromRuns(self, runs):
-        """
-        This function will build a dependencies graph from a set
-        of given runs.
-
-        :param runs: The input runs to build the graph
-        :return: The graph taking into account run dependencies
-
-        """
-        outputDict = {} # Store the output dict
-        g = pwutils.Graph(rootName=ROOT_NODE_NAME)
-
-        for r in runs:
-            n = g.createNode(r.strId())
-            n.run = r
-
-            # Legacy protocols do not have a plugin!!
-            develTxt = ''
-            plugin = r.getPlugin()
-            if plugin and plugin.inDevelMode():
-                develTxt = '* '
-
-            n.setLabel('%s%s' % (develTxt, r.getRunName()))
-            outputDict[r.getObjId()] = n
-            for _, attr in r.iterOutputAttributes():
-                # mark this output as produced by r
-                if attr is None:
-                    logger.warning("Output attribute %s of %s is None" % (_, r))
-                else:
-                    outputDict[attr.getObjId()] = n
-
-        def _checkInputAttr(node, pointed):
-            """ Check if an attr is registered as output"""
-            if pointed is not None:
-                pointedId = pointed.getObjId()
-
-                if pointedId in outputDict:
-                    parentNode = outputDict[pointedId]
-                    if parentNode is node:
-                        logger.warning("WARNING: Found a cyclic dependence from node %s to itself, probably a bug. " % pointedId)
-                    else:
-                        parentNode.addChild(node)
-                        if os.environ.get('CHECK_CYCLIC_REDUNDANCY') and self._checkCyclicRedundancy(parentNode, node):
-                            conflictiveNodes = set()
-                            for child in node.getChildren():
-                                if node in child._parents:
-                                    child._parents.remove(node)
-                                    conflictiveNodes.add(child)
-                                    logger.warning("WARNING: Found a cyclic dependence from node %s to %s, probably a bug. "
-                                                   % (node.getLabel() + '(' + node.getName() + ')',
-                                                      child.getLabel() + '(' + child.getName() + ')'))
-
-                            for conflictNode in conflictiveNodes:
-                                node._children.remove(conflictNode)
-
-                            return False
-                        return True
-            return False
-
-        for r in runs:
-            node = g.getNode(r.strId())
-            for _, attr in r.iterInputAttributes():
-                if attr.hasValue():
-                    pointed = attr.getObjValue()
-                    # Only checking pointed object and its parent, if more
-                    # levels we need to go up to get the correct dependencies
-                    if not _checkInputAttr(node, pointed):
-                        parent = self.mapper.getParent(pointed)
-                        _checkInputAttr(node, parent)
-        rootNode = g.getRoot()
-        rootNode.run = None
-        rootNode.label = ROOT_NODE_NAME
-
-        for n in g.getNodes():
-            if n.isRoot() and n is not rootNode:
-                rootNode.addChild(n)
-        return g
-
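
A small traversal sketch of the resulting graph, assuming a hypothetical project; every non-root node carries its protocol in node.run:

g = project.getRunsGraph(refresh=True)

def walk(node, depth=0):
    # The synthetic root has run=None; real runs are printed indented
    # according to their dependency depth.
    if node.run is not None:
        print('  ' * depth + node.getLabel())
    for child in node.getChildren():
        walk(child, depth + 1)

walk(g.getRoot())
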
-    @staticmethod
-    def _checkCyclicRedundancy(parent, child):
-        visitedNodes = set()
-        recursionStack = set()
-
-        def depthFirstSearch(node):
-            visitedNodes.add(node)
-            recursionStack.add(node)
-            for child in node.getChildren():
-                if child not in visitedNodes:
-                    if depthFirstSearch(child):
-                        return True
-                elif child in recursionStack and child != parent:
-                    return True
-
-            recursionStack.remove(node)
-            return False
-
-        return depthFirstSearch(child)
-
-
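
The helper above is a plain depth-first search with a recursion stack. A self-contained toy version of the same idea, using a hypothetical minimal node class:

class Node:
    """Toy stand-in for the graph nodes used above (hypothetical)."""
    def __init__(self, name):
        self.name = name
        self.children = []

    def getChildren(self):
        return self.children

def hasCycle(start):
    visited, stack = set(), set()

    def dfs(node):
        visited.add(node)
        stack.add(node)
        for child in node.getChildren():
            if child not in visited:
                if dfs(child):
                    return True
            elif child in stack:
                return True
        stack.remove(node)
        return False

    return dfs(start)

a, b, c = Node('a'), Node('b'), Node('c')
a.children, b.children, c.children = [b], [c], [a]  # a -> b -> c -> a
print(hasCycle(a))  # True
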
-    def _getRelationGraph(self, relation=pwobj.RELATION_SOURCE, refresh=False):
-        """ Retrieve objects produced as outputs and
-        make a graph taking into account the SOURCE relation. """
-        relations = self.mapper.getRelationsByName(relation)
-        g = pwutils.Graph(rootName=ROOT_NODE_NAME)
-        root = g.getRoot()
-        root.pointer = None
-        runs = self.getRuns(refresh=refresh)
-
-        for r in runs:
-            for paramName, attr in r.iterOutputAttributes():
-                p = pwobj.Pointer(r, extended=paramName)
-                node = g.createNode(p.getUniqueId(), attr.getNameId())
-                node.pointer = p
-                # The following alias if for backward compatibility
-                p2 = pwobj.Pointer(attr)
-                g.aliasNode(node, p2.getUniqueId())
-
-        for rel in relations:
-            pObj = self.getObject(rel[OBJECT_PARENT_ID])
-
-            # Duplicated ...
-            if pObj is None:
-                logger.warning("Relation seems to point to a deleted object. "
-                               "%s: %s" % (OBJECT_PARENT_ID, rel[OBJECT_PARENT_ID]))
-                continue
-
-            pExt = rel['object_parent_extended']
-            pp = pwobj.Pointer(pObj, extended=pExt)
-
-            if pObj is None or pp.get() is None:
-                logger.error("project._getRelationGraph: pointer to parent is "
-                             "None. IGNORING IT.\n")
-                for key in rel.keys():
-                    logger.info("%s: %s" % (key, rel[key]))
-
-                continue
-
-            pid = pp.getUniqueId()
-            parent = g.getNode(pid)
-
-            while not parent and pp.hasExtended():
-                pp.removeExtended()
-                parent = g.getNode(pp.getUniqueId())
-
-            if not parent:
-                logger.error("project._getRelationGraph: parent Node "
-                             "is None: %s" % pid)
-            else:
-                cObj = self.getObject(rel['object_child_id'])
-                cExt = rel['object_child_extended']
-
-                if cObj is not None:
-                    if cObj.isPointer():
-                        cp = cObj
-                        if cExt:
-                            cp.setExtended(cExt)
-                    else:
-                        cp = pwobj.Pointer(cObj, extended=cExt)
-                    child = g.getNode(cp.getUniqueId())
-
-                    if not child:
-                        logger.error("project._getRelationGraph: child Node "
-                                     "is None: %s." % cp.getUniqueId())
-                        logger.error(" parent: %s" % pid)
-                    else:
-                        parent.addChild(child)
-                else:
-                    logger.error("project._getRelationGraph: child Obj "
-                                 "is None, id: %s " % rel['object_child_id'])
-                    logger.error(" parent: %s" % pid)
-
-        for n in g.getNodes():
-            if n.isRoot() and n is not root:
-                root.addChild(n)
-
-        return g
-
-    def getSourceChilds(self, obj):
-        """ Return all the objects have used obj
-        as a source.
-        """
-        return self.mapper.getRelationChilds(pwobj.RELATION_SOURCE, obj)
-
-    def getSourceParents(self, obj):
-        """ Return all the objects that are SOURCE of this object.
-        """
-        return self.mapper.getRelationParents(pwobj.RELATION_SOURCE, obj)
-
-    def getTransformGraph(self, refresh=False):
-        """ Get the graph from the TRANSFORM relation. """
-        if refresh or not self._transformGraph:
-            self._transformGraph = self._getRelationGraph(pwobj.RELATION_TRANSFORM,
-                                                          refresh)
-
-        return self._transformGraph
-
-    def getSourceGraph(self, refresh=False):
-        """ Get the graph from the SOURCE relation. """
-        if refresh or not self._sourceGraph:
-            self._sourceGraph = self._getRelationGraph(pwobj.RELATION_SOURCE,
-                                                       refresh)
-
-        return self._sourceGraph
-
-    def getRelatedObjects(self, relation, obj, direction=pwobj.RELATION_CHILDS,
-                          refresh=False):
-        """ Get all objects related to obj by a give relation.
-
-        :param relation: the relation name to search for.
-        :param obj: object from which the relation will be search,
-            actually not only this, but all other objects connected
-            to this one by the pwobj.RELATION_TRANSFORM.
-        :parameter direction: Not used
-        :param refresh: If True, cached objects will be refreshed
-
-        """
-
-        graph = self.getTransformGraph(refresh)
-        relations = self.mapper.getRelationsByName(relation)
-        connection = self._getConnectedObjects(obj, graph)
-
-        objects = []
-        objectsDict = {}
-
-        for rel in relations:
-            pObj = self.getObject(rel[OBJECT_PARENT_ID])
-
-            if pObj is None:
-                logger.warning("Relation seems to point to a deleted object. "
-                               "%s: %s" % (OBJECT_PARENT_ID, rel[OBJECT_PARENT_ID]))
-                continue
-            pExt = rel['object_parent_extended']
-            pp = pwobj.Pointer(pObj, extended=pExt)
-
-            if pp.getUniqueId() in connection:
-                cObj = self.getObject(rel['object_child_id'])
-                cExt = rel['object_child_extended']
-                cp = pwobj.Pointer(cObj, extended=cExt)
-                if cp.hasValue() and cp.getUniqueId() not in objectsDict:
-                    objects.append(cp)
-                    objectsDict[cp.getUniqueId()] = True
-
-        return objects
-
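
A brief usage sketch, assuming a hypothetical project and an output set someOutput produced by one of its runs:

import pyworkflow.object as pwobj

# Pointers to everything connected to someOutput through the SOURCE
# relation, following the TRANSFORM graph built above.
related = project.getRelatedObjects(pwobj.RELATION_SOURCE, someOutput)
for pointer in related:
    print(pointer.getUniqueId())
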
-    def _getConnectedObjects(self, obj, graph):
-        """ Given a TRANSFORM graph, return the elements that
-        are connected to an object, either children, ancestors or siblings.
-        """
-        n = graph.getNode(obj.strId())
-        # Get the oldest ancestor of a node, before reaching the root node
-        while n is not None and not n.getParent().isRoot():
-            n = n.getParent()
-
-        connection = {}
-
-        if n is not None:
-            # Iterate recursively all descendants
-            for node in n.iterChildren():
-                connection[node.pointer.getUniqueId()] = True
-                # Add also
-                connection[node.pointer.get().strId()] = True
-
-        return connection
-
-    def isReadOnly(self):
-        if getattr(self, 'settings', None) is None:
-            return False
-
-        return self.settings.getReadOnly()
-
-    def isInReadOnlyFolder(self):
-        return self._isInReadOnlyFolder
-
-    def openedAsReadOnly(self):
-        return self.isReadOnly() or self.isInReadOnlyFolder()
-
-    def setReadOnly(self, value):
-        self.settings.setReadOnly(value)
-
-    def fixLinks(self, searchDir):
-        logger.info(f"Fixing links for project {self.getShortName()}. Searching in: {searchDir}")
-        runs = self.getRuns()
-
-        counter = 0
-        for prot in runs:
-            if prot.getClassName().startswith("ProtImport"):
-                runName = prot.getRunName()
-                logger.info(f"Found protocol {runName}")
-                for f in prot.getOutputFiles():
-                    if ':' in f:
-                        f = f.split(':')[0]
-
-                    if not os.path.exists(f):
-                        logger.info(f"\tMissing link: {f}")
-
-                        if os.path.islink(f):
-                            sourceFile = os.path.realpath(f)
-                            newFile = pwutils.findFileRecursive(os.path.basename(sourceFile),
-                                                                searchDir)
-                            if newFile:
-                                counter += 1
-                                logger.info(f"\t\tCreating link: {f} -> {newFile}")
-                                pwutils.createAbsLink(newFile, f)
-
-        logger.info(f"Fixed {counter} broken links")
-
-    @staticmethod
-    def cleanProjectName(projectName):
-        """ Cleans a project name to avoid common errors
-        Use it whenever you want to get the final project name pyworkflow will end up.
-        Spaces will be replaced by _ """
-
-        return re.sub(r"[^\w\d\-\_]", "-", projectName)
-
-
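
A quick illustration of the sanitizer above; note that the expression maps every disallowed character, spaces included, to '-' (the docstring mentions '_', but '-' is what the pattern substitutes):

import re

def cleanProjectName(projectName):
    # Same expression as in the static method above.
    return re.sub(r"[^\w\d\-\_]", "-", projectName)

print(cleanProjectName("my project 2024"))  # my-project-2024
print(cleanProjectName("relion/job007"))    # relion-job007
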
-class MissingProjectDbException(Exception):
-    pass
-
-
-class ModificationNotAllowedException(Exception):
-    pass