scipion-pyworkflow 3.11.0__py3-none-any.whl → 3.11.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (104) hide show
  1. pyworkflow/apps/__init__.py +29 -0
  2. pyworkflow/apps/pw_manager.py +37 -0
  3. pyworkflow/apps/pw_plot.py +51 -0
  4. pyworkflow/apps/pw_project.py +130 -0
  5. pyworkflow/apps/pw_protocol_list.py +143 -0
  6. pyworkflow/apps/pw_protocol_run.py +51 -0
  7. pyworkflow/apps/pw_run_tests.py +268 -0
  8. pyworkflow/apps/pw_schedule_run.py +322 -0
  9. pyworkflow/apps/pw_sleep.py +37 -0
  10. pyworkflow/apps/pw_sync_data.py +440 -0
  11. pyworkflow/apps/pw_viewer.py +78 -0
  12. pyworkflow/constants.py +1 -1
  13. pyworkflow/gui/__init__.py +36 -0
  14. pyworkflow/gui/browser.py +768 -0
  15. pyworkflow/gui/canvas.py +1190 -0
  16. pyworkflow/gui/dialog.py +981 -0
  17. pyworkflow/gui/form.py +2727 -0
  18. pyworkflow/gui/graph.py +247 -0
  19. pyworkflow/gui/graph_layout.py +271 -0
  20. pyworkflow/gui/gui.py +571 -0
  21. pyworkflow/gui/matplotlib_image.py +233 -0
  22. pyworkflow/gui/plotter.py +247 -0
  23. pyworkflow/gui/project/__init__.py +25 -0
  24. pyworkflow/gui/project/base.py +193 -0
  25. pyworkflow/gui/project/constants.py +139 -0
  26. pyworkflow/gui/project/labels.py +205 -0
  27. pyworkflow/gui/project/project.py +491 -0
  28. pyworkflow/gui/project/searchprotocol.py +240 -0
  29. pyworkflow/gui/project/searchrun.py +181 -0
  30. pyworkflow/gui/project/steps.py +171 -0
  31. pyworkflow/gui/project/utils.py +332 -0
  32. pyworkflow/gui/project/variables.py +179 -0
  33. pyworkflow/gui/project/viewdata.py +472 -0
  34. pyworkflow/gui/project/viewprojects.py +519 -0
  35. pyworkflow/gui/project/viewprotocols.py +2141 -0
  36. pyworkflow/gui/project/viewprotocols_extra.py +562 -0
  37. pyworkflow/gui/text.py +774 -0
  38. pyworkflow/gui/tooltip.py +185 -0
  39. pyworkflow/gui/tree.py +684 -0
  40. pyworkflow/gui/widgets.py +307 -0
  41. pyworkflow/mapper/__init__.py +26 -0
  42. pyworkflow/mapper/mapper.py +226 -0
  43. pyworkflow/mapper/sqlite.py +1583 -0
  44. pyworkflow/mapper/sqlite_db.py +145 -0
  45. pyworkflow/object.py +1 -0
  46. pyworkflow/plugin.py +4 -4
  47. pyworkflow/project/__init__.py +31 -0
  48. pyworkflow/project/config.py +454 -0
  49. pyworkflow/project/manager.py +180 -0
  50. pyworkflow/project/project.py +2095 -0
  51. pyworkflow/project/usage.py +165 -0
  52. pyworkflow/protocol/__init__.py +38 -0
  53. pyworkflow/protocol/bibtex.py +48 -0
  54. pyworkflow/protocol/constants.py +87 -0
  55. pyworkflow/protocol/executor.py +515 -0
  56. pyworkflow/protocol/hosts.py +318 -0
  57. pyworkflow/protocol/launch.py +277 -0
  58. pyworkflow/protocol/package.py +42 -0
  59. pyworkflow/protocol/params.py +781 -0
  60. pyworkflow/protocol/protocol.py +2712 -0
  61. pyworkflow/resources/protlabels.xcf +0 -0
  62. pyworkflow/resources/sprites.png +0 -0
  63. pyworkflow/resources/sprites.xcf +0 -0
  64. pyworkflow/template.py +1 -1
  65. pyworkflow/tests/__init__.py +29 -0
  66. pyworkflow/tests/test_utils.py +25 -0
  67. pyworkflow/tests/tests.py +342 -0
  68. pyworkflow/utils/__init__.py +38 -0
  69. pyworkflow/utils/dataset.py +414 -0
  70. pyworkflow/utils/echo.py +104 -0
  71. pyworkflow/utils/graph.py +169 -0
  72. pyworkflow/utils/log.py +293 -0
  73. pyworkflow/utils/path.py +528 -0
  74. pyworkflow/utils/process.py +154 -0
  75. pyworkflow/utils/profiler.py +92 -0
  76. pyworkflow/utils/progressbar.py +154 -0
  77. pyworkflow/utils/properties.py +618 -0
  78. pyworkflow/utils/reflection.py +129 -0
  79. pyworkflow/utils/utils.py +880 -0
  80. pyworkflow/utils/which.py +229 -0
  81. pyworkflow/webservices/__init__.py +8 -0
  82. pyworkflow/webservices/config.py +8 -0
  83. pyworkflow/webservices/notifier.py +152 -0
  84. pyworkflow/webservices/repository.py +59 -0
  85. pyworkflow/webservices/workflowhub.py +86 -0
  86. pyworkflowtests/tests/__init__.py +0 -0
  87. pyworkflowtests/tests/test_canvas.py +72 -0
  88. pyworkflowtests/tests/test_domain.py +45 -0
  89. pyworkflowtests/tests/test_logs.py +74 -0
  90. pyworkflowtests/tests/test_mappers.py +392 -0
  91. pyworkflowtests/tests/test_object.py +507 -0
  92. pyworkflowtests/tests/test_project.py +42 -0
  93. pyworkflowtests/tests/test_protocol_execution.py +146 -0
  94. pyworkflowtests/tests/test_protocol_export.py +78 -0
  95. pyworkflowtests/tests/test_protocol_output.py +158 -0
  96. pyworkflowtests/tests/test_streaming.py +47 -0
  97. pyworkflowtests/tests/test_utils.py +210 -0
  98. {scipion_pyworkflow-3.11.0.dist-info → scipion_pyworkflow-3.11.2.dist-info}/METADATA +2 -2
  99. scipion_pyworkflow-3.11.2.dist-info/RECORD +162 -0
  100. scipion_pyworkflow-3.11.0.dist-info/RECORD +0 -71
  101. {scipion_pyworkflow-3.11.0.dist-info → scipion_pyworkflow-3.11.2.dist-info}/WHEEL +0 -0
  102. {scipion_pyworkflow-3.11.0.dist-info → scipion_pyworkflow-3.11.2.dist-info}/entry_points.txt +0 -0
  103. {scipion_pyworkflow-3.11.0.dist-info → scipion_pyworkflow-3.11.2.dist-info}/licenses/LICENSE.txt +0 -0
  104. {scipion_pyworkflow-3.11.0.dist-info → scipion_pyworkflow-3.11.2.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,2712 @@
1
+ # **************************************************************************
2
+ # *
3
+ # * Authors: J.M. De la Rosa Trevin (delarosatrevin@scilifelab.se) [1]
4
+ # *
5
+ # * [1] SciLifeLab, Stockholm University
6
+ # *
7
+ # * This program is free software: you can redistribute it and/or modify
8
+ # * it under the terms of the GNU General Public License as published by
9
+ # * the Free Software Foundation, either version 3 of the License, or
10
+ # * (at your option) any later version.
11
+ # *
12
+ # * This program is distributed in the hope that it will be useful,
13
+ # * but WITHOUT ANY WARRANTY; without even the implied warranty of
14
+ # * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15
+ # * GNU General Public License for more details.
16
+ # *
17
+ # * You should have received a copy of the GNU General Public License
18
+ # * along with this program. If not, see <https://www.gnu.org/licenses/>.
19
+ # *
20
+ # * All comments concerning this program package may be sent to the
21
+ # * e-mail address 'scipion@cnb.csic.es'
22
+ # *
23
+ # **************************************************************************
24
+ """
25
+ This module contains classes required for the workflow
26
+ execution and tracking like: Step and Protocol
27
+ """
28
+ import os
29
+ import json
30
+ import sys
31
+ import threading
32
+ import time
33
+ from datetime import datetime
34
+
35
+ import pyworkflow as pw
36
+ from pyworkflow.exceptions import ValidationException, PyworkflowException
37
+ from pyworkflow.object import *
38
+ import pyworkflow.utils as pwutils
39
+ from pyworkflow.utils.log import getExtraLogInfo, STATUS, setDefaultLoggingContext
40
+ from pyworkflow.constants import PLUGIN_MODULE_VAR, QUEUE_FOR_JOBS
41
+ from .executor import StepExecutor, ThreadStepExecutor, QueueStepExecutor
42
+ from .constants import *
43
+ from .params import Form, IntParam
44
+ from ..utils import getFileSize
45
+
46
+
47
+ import logging
48
+
49
+ # Get the root logger
50
+ logger = logging.getLogger(__name__)
51
+
52
+
53
class Step(Object):
    """ Basic execution unit.
    It should define its Input, Output
    and define a run method.

    State (status, init/end times, error message) is kept in
    mapper-persistable wrappers (String/Boolean/CsvList) from
    pyworkflow.object; `dt` and the STATUS_* constants come from the
    star-imports at the top of the module.
    """

    def __init__(self, interactive=False, needsGPU=True, **kwargs):
        """
        :param interactive: when True, a successful run leaves the step in
            STATUS_INTERACTIVE waiting for the user (see run()).
        :param needsGPU: whether this step should be given a GPU slot.
        :param kwargs: ignored here; accepted for subclass flexibility.
        """
        super().__init__()
        self._prerequisites = CsvList()  # which steps needs to be done first
        self.status = String()
        self.initTime = String()  # timestamp set when the step starts running
        self.endTime = String()  # timestamp set when the step ends (any outcome)
        self._error = String()  # last error message when the step failed
        self.interactive = Boolean(interactive)
        self._resultFiles = String()  # JSON-encoded list of produced files (see FunctionStep)
        self._needsGPU = Boolean(needsGPU)
        self._index = None  # position of the step within the protocol's steps list

    def needsGPU(self) -> bool:
        """ Return True if this step was flagged as requiring a GPU. """
        return self._needsGPU.get()

    def getIndex(self):
        """ Return the step's index within the protocol (may be None). """
        return self._index

    def setIndex(self, newIndex):
        """ Set the step's index within the protocol. """
        self._index = newIndex

    def getPrerequisites(self):
        """ Return the CsvList of step ids that must run before this one. """
        return self._prerequisites

    def addPrerequisites(self, *newPrerequisites):
        """ Append the given step ids to the prerequisites list. """
        for p in newPrerequisites:
            self._prerequisites.append(p)

    def setPrerequisites(self, *newPrerequisites):
        """ Replace the current prerequisites with the given step ids. """
        self._prerequisites.clear()
        self.addPrerequisites(*newPrerequisites)

    def _preconditions(self):
        """ Check if the necessary conditions to
        step execution are met"""
        # NOTE(review): _validate is expected to be provided by subclasses
        # or mixins -- it is not defined in this class.
        return self._validate() == []

    def _postconditions(self):
        """ Check if the step have done well its task
        and accomplish its results"""
        return True

    def _run(self):
        """ This is the function that will do the real job.
        It should be override by sub-classes."""
        pass

    def setRunning(self):
        """ Set the state as STATUS_RUNNING and
        set the init and end times.
        """
        self.initTime.set(dt.datetime.now())
        self.endTime.set(None)
        self.status.set(STATUS_RUNNING)
        self._error.set(None)  # Clean previous error message

    def getError(self):
        """ Return the error String wrapper object (not the plain text). """
        return self._error

    def getErrorMessage(self):
        """ Return the error message text, or '' when no error is stored. """
        return self.getError().get('')

    def setFailed(self, msg):
        """ Set the run failed and store an error message. """
        self._finalizeStep(STATUS_FAILED, msg=msg)

    def setAborted(self):
        """ Set the status to aborted and updates the endTime. """
        self._finalizeStep(STATUS_ABORTED, "Aborted by user.")

    def setFinished(self):
        """ Set the status to finish updates the end time """
        self._finalizeStep(STATUS_FINISHED)

    def _finalizeStep(self, status, msg=None):
        """ Closes the step, setting up the endTime and optionally an error message"""
        self.endTime.set(dt.datetime.now())
        if msg:
            self._error.set(msg)
        self.status.set(status)

    def setSaved(self):
        """ Set the status to saved and updated the endTime. """
        self.initTime.set(None)
        self.endTime.set(None)
        self.status.set(STATUS_SAVED)
        self._error.set(None)  # Clean previous error message

    def getStatus(self):
        """ Return the current status, defaulting to STATUS_NEW when unset. """
        return self.status.get(STATUS_NEW)

    def getElapsedTime(self, default=dt.timedelta()):
        """ Return the time that took to run
        (or the actual running time if still is running )

        :param default: value returned when the step has not started yet.
        """
        elapsed = default

        if self.initTime.hasValue():
            t1 = self.initTime.datetime()

            if self.endTime.hasValue():
                t2 = self.endTime.datetime()
            else:
                # Still running: measure against the current time
                t2 = dt.datetime.now()

            elapsed = t2 - t1

        return elapsed

    def setStatus(self, value):
        """ Set the status value directly. """
        return self.status.set(value)

    def isNew(self):
        return self.getStatus() == STATUS_NEW

    def setInteractive(self, value):
        """ Set whether this step is interactive. """
        return self.interactive.set(value)

    def isActive(self):
        # ACTIVE_STATUS is a collection of statuses imported from .constants
        return self.getStatus() in ACTIVE_STATUS

    def isFinished(self):
        return self.getStatus() == STATUS_FINISHED

    def isRunning(self):
        return self.getStatus() == STATUS_RUNNING

    def isFailed(self):
        return self.getStatus() == STATUS_FAILED

    def isSaved(self):
        return self.getStatus() == STATUS_SAVED

    def isScheduled(self):
        return self.getStatus() == STATUS_SCHEDULED

    def isAborted(self):
        return self.getStatus() == STATUS_ABORTED

    def isLaunched(self):
        return self.getStatus() == STATUS_LAUNCHED

    def isInteractive(self):
        return self.interactive.get()

    def isWaiting(self):
        return self.getStatus() == STATUS_WAITING

    def run(self):
        """ Do the job of this step"""
        self.setRunning()
        try:
            self._run()
            self.endTime.set(dt.datetime.now())
            # Only promote to a terminal status if _run() did not already
            # change the status itself (e.g. to FAILED).
            if self.status.get() == STATUS_RUNNING:
                if self.isInteractive():
                    # If the Step is interactive, after run
                    # it will be waiting for use to mark it as DONE
                    status = STATUS_INTERACTIVE
                else:
                    status = STATUS_FINISHED
                self.status.set(status)

        except PyworkflowException as e:
            logger.info(pwutils.redStr(str(e)))
            self.setFailed(str(e))
        except Exception as e:
            self.setFailed(str(e))
            import traceback
            traceback.print_exc()
            # raise #only in development
        # finally:
        #     self.endTime.set(dt.datetime.now())
234
class FunctionStep(Step):
    """ This is a Step wrapper around a normal function
    This class will ease the insertion of Protocol function steps
    through the function _insertFunctionStep"""

    def __init__(self, func=None, funcName=None, *funcArgs, wait=False, interactive=False, needsGPU=True):
        """
        Params:
            func: the function that will be executed.
            funcName: the name assigned to that function (will be stored)
            *funcArgs: argument list passed to the function (serialized and stored)
            wait: when True the step starts in STATUS_WAITING.
            interactive: see Step.
            needsGPU: see Step.
        """
        super().__init__(interactive=interactive, needsGPU=needsGPU)
        self._func = func  # Function should be set before run
        self._args = funcArgs
        self.funcName = String(funcName)
        # Arguments are persisted as JSON; values that are not
        # JSON-serializable are stored as null (default=lambda x: None).
        self.argsStr = String(json.dumps(funcArgs, default=lambda x: None))
        if wait:
            self.setStatus(STATUS_WAITING)

    def _runFunc(self):
        """ Return the possible result files after running the function. """
        return self._func(*self._args)

    def _run(self):
        """ Run the function and check the result files if any. """
        resultFiles = self._runFunc()
        # A single path may be returned; normalize to a list
        if isinstance(resultFiles, str):
            resultFiles = [resultFiles]
        if resultFiles and len(resultFiles):
            missingFiles = pwutils.missingPaths(*resultFiles)
            if len(missingFiles):
                raise Exception('Missing filePaths: ' + ' '.join(missingFiles))
            # Persist the produced files so _postconditions can re-check them
            self._resultFiles.set(json.dumps(resultFiles))

    def _postconditions(self):
        """ This type of Step, will simply check
        as postconditions that the result filePaths exists"""
        if not self._resultFiles.hasValue():
            return True
        filePaths = json.loads(self._resultFiles.get())

        return len(pwutils.missingPaths(*filePaths)) == 0

    def __eq__(self, other):
        """ Compare with other FunctionStep"""
        # Two steps are equal when they wrap the same function name with
        # the same serialized arguments (the live _func is not compared).
        return (self.funcName == other.funcName and
                self.argsStr == other.argsStr)

    def __ne__(self, other):
        return not self.__eq__(other)

    def __str__(self):
        return "%s - %s" % (self._objId ,self.funcName.get())
291
class RunJobStep(FunctionStep):
    """ This Step will wrapper the commonly used function runJob
    for launching specific programs with some parameters.
    The runJob function should be provided by the protocol
    when inserting a new RunJobStep"""

    def __init__(self, runJobFunc=None, programName=None, arguments=None,
                 resultFiles=None, **kwargs):
        """
        :param runJobFunc: callable actually used to launch the program.
        :param programName: name of the program to run (stored as the
            first serialized argument).
        :param arguments: command-line arguments for the program (stored
            as the second serialized argument).
        :param resultFiles: reserved for expected result files; currently
            unused. Default changed from a shared mutable list ([]) to
            None -- backward compatible, avoids the mutable-default pitfall.
        :param kwargs: extra parameters, currently ignored.
        """
        FunctionStep.__init__(self, runJobFunc, 'runJob', programName,
                              arguments)
        # Number of mpi and threads used to run the program
        self.__runJob = runJobFunc  # Store the current function to run the job
        self.mpi = 1
        self.threads = 1

    def _runFunc(self):
        """ Wrap around runJob function"""
        # We know that:
        #   _func: is the runJob function
        #   _args[0]: is the program name
        #   _args[1]: is the arguments to the program
        return self._func(None, self._args[0], self._args[1],
                          numberOfMpi=self.mpi, numberOfThreads=self.threads)
        # TODO: Add the option to return resultFiles

    def __str__(self):
        return self._args[0]  # return program name
320
class StepSet(Set):
    """ Set subclass specialized in persisting Step objects.

    The module's global namespace is handed to the mapper as the classes
    dictionary, so Step subclasses defined in this module can be
    reconstructed when loading from storage.
    """

    def __init__(self, filename=None, prefix='',
                 mapperClass=None, **kwargs):
        # Forward everything to Set, supplying this module's globals so
        # stored items resolve to the classes declared here.
        super().__init__(filename, prefix, mapperClass,
                         classesDict=globals(), **kwargs)
329
+ class Protocol(Step):
330
+ """ The Protocol is a higher type of Step.
331
+ It also have the inputs, outputs and other Steps properties,
332
+ but contains a list of steps that are executed
333
+ """
334
+
335
+ # Version where protocol appeared first time
336
+ _stepsCheckSecs = pw.Config.getStepsCheckSeconds()
337
+ # Protocol develop status: PROD, BETA, NEW
338
+ _devStatus = pw.PROD
339
+
340
+ """" Possible Outputs:
341
+ This is an optional but recommended attribute to fill.
342
+ It has to be an enum with names being the name of the output and value the class of the output:
343
+
344
+ class MyOutput(enum.Enum):
345
+ outputMicrographs = SetOfMicrographs
346
+ outputMicrographDW = SetOfMovies
347
+
348
+ When defining outputs you can, optionally, use this enum like:
349
+ self._defineOutputs(**{MyOutput.outputMicrographs.name, setOfMics})
350
+ It will help to keep output names consistently
351
+
352
+ Alternative an inline dictionary will work (this is mandatory in case two or more outputs are of the same type):
353
+ _possibleOutputs = {"outputMicrographs" : SetOfMicrographs}
354
+
355
+ For a more fine detailed/dynamic output based on parameters, you can overwrite the getter:
356
+ getPossibleOutputs() in your protocol.
357
+
358
+ """
359
+ _possibleOutputs = None
360
+
361
+ # Cache package and plugin
362
+ _package = None
363
+ _plugin = None
364
+
365
+ # Maybe this property can be inferred from the
366
+ # prerequisites of steps, but is easier to keep it
367
+ stepsExecutionMode = STEPS_SERIAL
368
+
369
+ def modeSerial(self):
370
+ """ Returns true if steps are run one after another"""
371
+ # Maybe this property can be inferred from the
372
+ # prerequisites of steps, but is easier to keep it
373
+ return self.stepsExecutionMode == STEPS_SERIAL
374
+
375
+ def modeParallel(self):
376
+ """ Returns true if steps are run in parallel"""
377
+ return not self.modeSerial()
378
+
379
+ def __init__(self, **kwargs):
380
+ Step.__init__(self, **kwargs)
381
+ self._size = None
382
+ self._steps = [] # List of steps that will be executed
383
+ self._newSteps = False # Boolean to annotate when there are new steps added to the above list. And need persistence.
384
+ # All generated filePaths should be inside workingDir
385
+ self.workingDir = String(kwargs.get('workingDir', '.'))
386
+ self.mapper = kwargs.get('mapper', None)
387
+ self._inputs = []
388
+ self._outputs = CsvList()
389
+ # This flag will be used to annotate it output are already "migrated"
390
+ # and available in the _outputs list. Therefore iterating
391
+ self._useOutputList = Boolean(False)
392
+ # Expert level needs to be defined before parsing params
393
+ self.expertLevel = Integer(kwargs.get('expertLevel', LEVEL_NORMAL))
394
+ self._definition = Form(self)
395
+ self._defineParams(self._definition)
396
+ self._createVarsFromDefinition(**kwargs)
397
+ self._log = logger
398
+ self._buffer = '' # text buffer for reading log files
399
+ # Project to which the protocol belongs
400
+ self._project = kwargs.get('project', None)
401
+ # Filename templates dict that will be used by _getFileName
402
+ self.__filenamesDict = {}
403
+
404
+ # This will be used at project load time to check if
405
+ # we need to update the protocol with the data from run.db
406
+ self.lastUpdateTimeStamp = String()
407
+
408
+ # For non-parallel protocols mpi=1 and threads=1
409
+ # MPIs
410
+ self.allowMpi = hasattr(self, 'numberOfMpi')
411
+ if not self.allowMpi:
412
+ self.numberOfMpi = Integer(1)
413
+
414
+ # Threads
415
+ self.allowThreads = hasattr(self, 'numberOfThreads')
416
+ if not self.allowThreads:
417
+ self.numberOfThreads = Integer(1)
418
+
419
+ # Check if MPI or threads are passed in **kwargs, mainly used in tests
420
+ if 'numberOfMpi' in kwargs:
421
+ self.numberOfMpi.set(kwargs.get('numberOfMpi'))
422
+
423
+ if 'numberOfThreads' in kwargs:
424
+ self.numberOfThreads.set(kwargs.get('numberOfThreads'))
425
+
426
+ if not hasattr(self, 'hostName'):
427
+ self.hostName = String(kwargs.get('hostName', 'localhost'))
428
+
429
+ if not hasattr(self, 'hostFullName'):
430
+ self.hostFullName = String()
431
+
432
+
433
+ # Run mode
434
+ self.runMode = Integer(kwargs.get('runMode', MODE_RESUME))
435
+ # Use queue system?
436
+ self._useQueue = Boolean(pw.Config.SCIPION_USE_QUEUE)
437
+ # Store a json string with queue name
438
+ # and queue parameters (only meaningful if _useQueue=True)
439
+ self._queueParams = String()
440
+ self.queueShown = False
441
+ self._jobId = CsvList() # Store queue job ids
442
+ self._pid = Integer()
443
+ self._stepsExecutor = None
444
+ self._stepsDone = Integer(0)
445
+ self._cpuTime = Integer(0)
446
+ self._numberOfSteps = Integer(0)
447
+ # For visualization
448
+ self.allowHeader = Boolean(True)
449
+ # Create an String variable to allow some protocol to precompute
450
+ # the summary message
451
+ self.summaryVar = String()
452
+ self.methodsVar = String()
453
+ # Create a variable to know if the protocol has expert params
454
+ self._hasExpert = None
455
+
456
+ # Store warnings here
457
+ self.summaryWarnings = []
458
+ # Get a lock for threading execution
459
+ self._lock = threading.RLock() # Recursive locks allows a thread to acquire lock on same object more
460
+ # than one time, thus avoiding deadlock situation. This fixed the concurrency problems we had before.
461
+ self.forceSchedule = Boolean(False)
462
+
463
+
464
+ def getMPIs(self):
465
+ """ Returns the value of MPIs (integer)"""
466
+ return self.numberOfMpi.get()
467
+
468
+ def getScipionThreads(self):
469
+ """ Returns the number of Scipion threads. Not the threads that are argument for programs but those that will
470
+ run steps in parallel. This assumes cls.stepsExecutionMode = STEP_PARALLEL. See Param.addParallelSection"""
471
+ return self.numberOfThreads.get()
472
+
473
+ def getBinThreads(self):
474
+ """ Returns the number of binary threads. An integer to pass as an argument for the binary program integrated.
475
+ See Param.addParallelSection"""
476
+
477
+ if self.modeSerial():
478
+ return self.numberOfThreads.get()
479
+ else:
480
+ return self.binThreads.get()
481
+
482
+ def getTotalThreads(self):
483
+ """ Returns the total number of threads the protocol will need. This may be necessary when clusters require this value"""
484
+ if self.modeSerial():
485
+ # This will be the main thread + the binary threads * mpi ?
486
+ return 1 + self.getTotalBinThreads()
487
+ else:
488
+ # One main thread (included in Scipion threads) plus TotalBinThread time processing steps (Scipion threads -1)
489
+ return 1 + ((self.getScipionThreads()-1)* self.getTotalBinThreads())
490
+
491
+ def getTotalBinThreads(self):
492
+ """ Returns the total number to cores the binary will use: threads * mpis"""
493
+ return self.getBinThreads() * self.getMPIs()
494
+
495
+ def _storeAttributes(self, attrList, attrDict):
496
+ """ Store all attributes in attrDict as
497
+ attributes of self, also store the key in attrList.
498
+ """
499
+ for key, value in attrDict.items():
500
+ if key not in attrList:
501
+ attrList.append(key)
502
+ setattr(self, key, value)
503
+
504
+ def _defineInputs(self, **kwargs):
505
+ """ This function should be used to define
506
+ those attributes considered as Input.
507
+ """
508
+ self._storeAttributes(self._inputs, kwargs)
509
+
510
+ def _defineOutputs(self, **kwargs):
511
+ """ This function should be used to specify
512
+ expected outputs.
513
+ """
514
+ for k, v in kwargs.items():
515
+ if hasattr(self, k):
516
+ self._deleteChild(k, v)
517
+ self._insertChild(k, v)
518
+
519
+ # Store attributes in _output (this does not persist them!)
520
+ self._storeAttributes(self._outputs, kwargs)
521
+
522
+ # Persist outputs list
523
+ self._insertChild("_outputs", self._outputs)
524
+ self._useOutputList.set(True)
525
+ self._insertChild("_useOutputList", self._useOutputList)
526
+
527
+ def _closeOutputSet(self):
528
+ """Close all output set"""
529
+ for outputName, output in self.iterOutputAttributes():
530
+ if isinstance(output, Set) and output.isStreamOpen():
531
+ logger.info("Closing %s output" % outputName)
532
+ self.__tryUpdateOutputSet(outputName, output, state=Set.STREAM_CLOSED)
533
+
534
+ def _updateOutputSet(self, outputName, outputSet,
535
+ state=Set.STREAM_OPEN):
536
+ """ Use this function when updating an Stream output set.
537
+ """
538
+ self.__tryUpdateOutputSet(outputName, outputSet, state)
539
+
540
+ def __tryUpdateOutputSet(self, outputName, outputSet,
541
+ state=Set.STREAM_OPEN, tries=1, firstException=None):
542
+ try:
543
+ # Update the set with the streamState value (either OPEN or CLOSED)
544
+ outputSet.setStreamState(state)
545
+
546
+ if self.hasAttribute(outputName):
547
+ outputSet.write() # Write to commit changes
548
+ outputAttr = getattr(self, outputName)
549
+ # Copy the properties to the object contained in the protocol
550
+ # Default Set.copy ignores some attributes like size or mapperPath.
551
+ # In this case we want all to be copied
552
+ outputAttr.copy(outputSet, copyId=False, ignoreAttrs=[])
553
+ # Persist changes
554
+ self._store(outputAttr)
555
+ else:
556
+ # Here the defineOutputs function will call the write() method
557
+ self._defineOutputs(**{outputName: outputSet})
558
+ self._store(outputSet)
559
+ # Close set database to avoid locking it
560
+ outputSet.close()
561
+
562
+ except Exception as ex:
563
+
564
+ if tries > pw.Config.getUpdateSetAttempts():
565
+ raise BlockingIOError("Can't update %s (output) of %s after %s attempts. Reason: %s. "
566
+ "Concurrency, a non writable file system or a quota exceeded could be among the causes." %
567
+ (outputName, self,tries-1, ex)) from firstException
568
+ else:
569
+ logger.warning("Trying to update %s (output) of protocol %s, attempt=%d: %s " % (outputName, self, tries, ex))
570
+ time.sleep(pw.Config.getUpdateSetAttemptsWait())
571
+ self.__tryUpdateOutputSet(outputName, outputSet, state,
572
+ tries + 1, firstException= ex if tries==1 else firstException)
573
+
574
+ def hasExpert(self):
575
+ """ This function checks if the protocol has
576
+ any expert parameter"""
577
+ if self._hasExpert is None:
578
+ self._hasExpert = False
579
+ for paraName, param in self._definition.iterAllParams():
580
+ if param.isExpert():
581
+ self._hasExpert = True
582
+ break
583
+
584
+ return self._hasExpert
585
+
586
+ def getProject(self):
587
+ return self._project
588
+
589
+ def setProject(self, project):
590
+ self._project = project
591
+
592
+ @staticmethod
593
+ def hasDefinition(cls):
594
+ """ Check if the protocol has some definition.
595
+ This can help to detect "abstract" protocol that
596
+ only serve as base for other, not to be instantiated.
597
+ """
598
+ return hasattr(cls, '_definition')
599
+
600
+ @classmethod
601
+ def isNewDev(cls):
602
+ if cls._devStatus == pw.NEW:
603
+ return True
604
+
605
+ @classmethod
606
+ def isBeta(cls):
607
+ return cls._devStatus == pw.BETA
608
+
609
+ @classmethod
610
+ def isUpdated(cls):
611
+ return cls._devStatus == pw.UPDATED
612
+
613
+ def getDefinition(self):
614
+ """ Access the protocol definition. """
615
+ return self._definition
616
+
617
+ def getParam(self, paramName):
618
+ """ Return a _definition param give its name. """
619
+ return self._definition.getParam(paramName)
620
+
621
+ def getEnumText(self, paramName):
622
+ """ This function will retrieve the text value
623
+ of an enum parameter in the definition, taking the actual value in
624
+ the protocol.
625
+
626
+ :param paramName: the name of the enum param.
627
+
628
+ :returns: the string value corresponding to the enum choice.
629
+
630
+ """
631
+ index = getattr(self, paramName).get()
632
+ return self.getParam(paramName).choices[index]
633
+
634
+ def evalParamCondition(self, paramName):
635
+ """ Eval if the condition of paramName in _definition
636
+ is satisfied with the current values of the protocol attributes.
637
+ """
638
+ return self._definition.evalParamCondition(paramName)
639
+
640
+ def evalExpertLevel(self, paramName):
641
+ """ Return the expert level evaluation for a param with the given name.
642
+ """
643
+ return self.evalParamExpertLevel(self.getParam(paramName))
644
+
645
+ def evalParamExpertLevel(self, param):
646
+ """ Return True if the param has an expert level is less than
647
+ the one for the whole protocol.
648
+ """
649
+ return param.expertLevel.get() <= self.expertLevel.get()
650
+
651
+ def iterDefinitionAttributes(self):
652
+ """ Iterate over all the attributes from definition. """
653
+ for paramName, _ in self._definition.iterParams():
654
+ yield paramName, getattr(self, paramName)
655
+
656
+ def getDefinitionDict(self):
657
+ """ Similar to getObjDict, but only for those
658
+ params that are in the form.
659
+ This function is used for export protocols as json text file.
660
+ """
661
+ d = OrderedDict()
662
+ d['object.className'] = self.getClassName()
663
+ d['object.id'] = self.strId()
664
+ d['object.label'] = self.getObjLabel()
665
+ d['object.comment'] = self.getObjComment()
666
+ d['_useQueue'] = self._useQueue.getObjValue()
667
+ d['_prerequisites'] = self._prerequisites.getObjValue()
668
+
669
+ if self._queueParams:
670
+ d['_queueParams'] = self._queueParams.get()
671
+
672
+ od = self.getObjDict(includePointers=True)
673
+
674
+ for attrName in od:
675
+ if self.getParam(attrName) is not None:
676
+ d[attrName] = od[attrName]
677
+
678
+ return d
679
+
680
+ def processImportDict(self, importDict, importDir):
681
+ """
682
+ This function is used when we import a workflow from a json to process or
683
+ adjust the json data for reproducibility purposes e.g. resolve relative paths
684
+ Params:
685
+ importDict: Dict of the protocol that we got from the json
686
+ importDir: dir of the json we're importing
687
+ """
688
+ return importDict
689
+
690
+ def iterDefinitionSections(self):
691
+ """ Iterate over all the section of the definition. """
692
+ for section in self._definition.iterSections():
693
+ yield section
694
+
695
+ def iterInputAttributes(self):
696
+ """ Iterate over the main input parameters
697
+ of this protocol. Now the input are assumed to be these attribute
698
+ which are pointers and have no condition.
699
+ """
700
+ for key, attr in self.getAttributes():
701
+ if not isinstance(attr, Object):
702
+ raise Exception('Attribute %s have been overwritten to type %s '
703
+ % (key, type(attr)))
704
+ if isinstance(attr, PointerList) and attr.hasValue():
705
+ for item in attr:
706
+ # the same key is returned for all items inside the
707
+ # PointerList, this is used in viewprotocols.py
708
+ # to group them inside the same tree element
709
+ yield key, item
710
+ if attr.isPointer() and attr.hasValue():
711
+ yield key, attr
712
+
713
+ # Consider here scalars with pointers inside
714
+ elif isinstance(attr, Scalar) and attr.hasPointer():
715
+ # Scheduling was stale cause this Scalar with pointers where not returned
716
+ #if attr.get() is not None:
717
+ yield key, attr.getPointer()
718
+
719
    def iterInputPointers(self):
        """ This function is similar to iterInputAttributes, but it yields
        all input Pointers, independently if they have value or not.

        Yields (attributeName, Pointer) pairs.
        """
        for key, attr in self.getAttributes():
            if not isinstance(attr, Object):
                # A plain python value replaced the expected Object wrapper
                raise Exception('Attribute %s have been overwritten to type %s '
                                % (key, type(attr)))
            if isinstance(attr, PointerList) and attr.hasValue():
                for item in attr:
                    # the same key is returned for all items inside the
                    # PointerList, this is used in viewprotocols.py
                    # to group them inside the same tree element
                    yield key, item
            elif attr.isPointer():
                yield key, attr
+ def getProtocolsToUpdate(self):
737
+ """
738
+ This function returns a list of protocols ids that need to update
739
+ their database to launch this protocol (this method is only used
740
+ when a WORKFLOW is restarted or continued).
741
+ Actions done here are:
742
+
743
+ #. Iterate over the main input Pointer of this protocol
744
+ (here, 3 different cases are analyzed):
745
+
746
+ A #. When the pointer points to a protocol
747
+
748
+ B #. When the pointer points to another object (INDIRECTLY).
749
+ The pointer has an _extended value (new parameters configuration
750
+ in the protocol)
751
+
752
+ C #. When the pointer points to another object (DIRECTLY).
753
+
754
+ - The pointer has not an _extended value (old parameters
755
+ configuration in the protocol)
756
+
757
+ #. The PROTOCOL to which the pointer points is determined and saved in
758
+ the list
759
+
760
+ #. If this pointer points to a set (case B and C):
761
+
762
+ - Iterate over the main attributes of the set
763
+ - if attribute is a pointer then we add the pointed protocol to the ids list
764
+ """
765
+ protocolIds = []
766
+ protocol = None
767
+ for key, attrInput in self.iterInputAttributes():
768
+ outputs = []
769
+ output = attrInput.get()
770
+ if isinstance(output, Protocol): # case A
771
+ protocol = output
772
+ for _, protOutput in protocol.iterOutputAttributes():
773
+ outputs.append(protOutput) # for case A store all the protocols outputs
774
+ else:
775
+ if attrInput.hasExtended(): # case B
776
+ protocol = attrInput.getObjValue()
777
+ else: # case C
778
+
779
+ if self.getProject() is not None:
780
+ protocol = self.getProject().getRunsGraph(refresh=True).getNode(str(output.getObjParentId())).run
781
+ else:
782
+ # This is a problem, since protocols coming from
783
+ # Pointers do not have the _project set.
784
+ # We do not have a clear way to get the protocol if
785
+ # we do not have the project object associated
786
+ # This case implies Direct Pointers to Sets
787
+ # (without extended): hopefully this will only be
788
+ # created from tests
789
+ logger.warning("Can't get %s info from %s."
790
+ " This could render unexpected results when "
791
+ "scheduling protocols. Value: %s" % (key, self, attrInput))
792
+ continue
793
+
794
+ if output is not None:
795
+ outputs.append(output)
796
+
797
+ # If there is output
798
+ if outputs:
799
+ # Iter over all the outputs
800
+ for output in outputs:
801
+ # For each output attribute: Looking for pointers like SetOfCoordinates.micrographs
802
+ for k, attr in output.getAttributes():
803
+ # If it's a pointer
804
+ if isinstance(attr, Pointer):
805
+ logger.debug("Pointer found in output: %s.%s (%s)" % (output, k, attr))
806
+ prot = attr.getObjValue()
807
+ if prot is not None:
808
+ if isinstance(prot, Protocol):
809
+ protocolIds.append(prot.getObjId())
810
+ else:
811
+ logger.warning(f"We have found that {output}.{key} points to {attr} "
812
+ f"and is a direct pointer. Direct pointers are less reliable "
813
+ f"in streaming scenarios. Developers should avoid them.")
814
+
815
+ protocolIds.append(protocol.getObjId())
816
+
817
+ return protocolIds
818
+
819
    def getInputStatus(self):
        """ Returns if any input pointer is not ready yet and if there is
        any pointer to an open set

        :return: tuple (emptyInput, openSetPointer, emptyPointers) of booleans.
        """
        emptyPointers = False
        openSetPointer = False
        emptyInput = False

        for paramName, attr in self.iterInputPointers():

            param = self.getParam(paramName)
            # Issue #1597: New data loaded with old code.
            # If the input pointer is not a param:
            # This could happen in backward incompatibility cases,
            # Protocol has an attribute (inputPointer) but class does not define
            # if in the define params.
            if param is None:
                print("%s attribute is not defined as parameter. "
                      "This could happen when loading new code with older "
                      "scipion versions." % paramName)
                continue

            condition = self.evalParamCondition(paramName)

            obj = attr.get()
            if isinstance(obj, Protocol) and obj.getStatus() == STATUS_SAVED:  # the pointer points to a protocol
                emptyPointers = True
            if obj is None and attr.hasValue():
                # Pointer is set but the pointed object is not available yet
                emptyPointers = True
            if condition and obj is None and not param.allowsNull:
                if not attr.hasValue():
                    # Mandatory, active input left empty
                    emptyInput = True

            if not self.worksInStreaming() and isinstance(obj, Set) and obj.isStreamOpen():
                # Non-streaming protocols must wait for the input set to close
                openSetPointer = True

        return emptyInput, openSetPointer, emptyPointers
    def iterOutputAttributes(self, outputClass=None, includePossible=False):
        """ Iterate over the outputs produced by this protocol.

        :param outputClass: if not None, only yield outputs that are
            instances of this class.
        :param includePossible: when True and there is no actual output,
            yield the declared possible outputs instead.
        """

        # New model keeps an explicit output list; old model scans attributes
        iterator = self._iterOutputsNew if self._useOutputList else self._iterOutputsOld

        hasOutput=False

        # Iterate through actual outputs
        for key, attr in iterator():
            if outputClass is None or isinstance(attr, outputClass):
                hasOutput = True
                yield key, attr

        # NOTE: This will only happen in case there is no actual output.
        # There is no need to avoid duplication of actual output and possible output.
        if includePossible and not hasOutput and self.getPossibleOutputs() is not None:
            for possibleOutput in self.getPossibleOutputs():
                if isinstance(possibleOutput, str):
                    # _possibleOutputs is a plain dict: name -> declared class
                    yield possibleOutput, self._possibleOutputs[possibleOutput]
                else:
                    # _possibleOutputs is an Enum: yield member name and value
                    yield possibleOutput.name, possibleOutput.value
    def getPossibleOutputs(self):
        """ Return the declared possible outputs (dict or Enum), or None. """
        return self._possibleOutputs
+ def _iterOutputsNew(self):
883
+ """ This methods iterates through a list where outputs have been
884
+ annotated"""
885
+
886
+ # Loop through the output list
887
+ for attrName in self._outputs:
888
+
889
+ # FIX: When deleting manually an output, specially for interactive protocols.
890
+ # The _outputs is properly deleted in projects.sqlite, not it's run.db remains.
891
+ # When the protocol is updated from run.db it brings the outputs that were deleted
892
+ if hasattr(self, attrName):
893
+ # Get it from the protocol
894
+ attr = getattr(self, attrName)
895
+
896
+ yield attrName, attr
897
+ else:
898
+ self._outputs.remove(attrName)
899
+
900
    def _iterOutputsOld(self):
        """ This method iterates assuming the old model: any EMObject attribute
        is an output."""
        # Iterate old Style:

        try:
            domain = self.getClassDomain()
        except Exception as e:
            print(e)
            print("Protocol in workingdir ", self.getWorkingDir(), " is of an unknown class")
            print("Maybe the class name has changed")
            # NOTE(review): inside a generator this `return` simply stops the
            # iteration; the ("none", None) tuple only ends up as
            # StopIteration.value and is never seen by normal for-loops.
            return "none", None

        for key, attr in self.getAttributes():
            # Any attribute of the domain's base object class counts as output
            if isinstance(attr, domain._objectClass):
                yield key, attr
        return
+ def isInStreaming(self):
919
+ # For the moment let's assume a protocol is in streaming
920
+ # if at least one of the output sets is in STREAM_OPEN state
921
+ for paramName, attr in self.iterOutputAttributes():
922
+ if isinstance(attr, Set):
923
+ if attr.isStreamOpen():
924
+ return True
925
+ return False
926
+
927
    @classmethod
    def worksInStreaming(cls):
        """ Return True when this protocol class supports streaming.

        Support is detected by checking whether the class overrides
        _stepsCheck (the periodic hook used to add steps/close outputs).
        """
        # A protocol should work in streaming if it implements the stepCheck()
        # Get the stepCheck method from the Protocol
        baseStepCheck = Protocol._stepsCheck
        ownStepCheck = cls._stepsCheck

        # Different function object/code => the subclass overrode the hook
        return not pwutils.isSameFunction(baseStepCheck, ownStepCheck)
    def allowsGpu(self):
        """ Returns True if this protocol allows GPU computation.

        Purely declarative: true when the params definition added a
        GPU_LIST attribute to this protocol.
        """
        return self.hasAttribute(GPU_LIST)
    def requiresGpu(self):
        """ Return True if this protocol can only be executed in GPU.

        That is: GPU is allowed but there is no USE_GPU boolean param
        letting the user opt out.
        """
        return self.allowsGpu() and not self.hasAttribute(USE_GPU)
    def usesGpu(self):
        """ Return True when this run will actually use the GPU: GPU is
        allowed and USE_GPU is enabled (defaults to True when missing). """
        return self.allowsGpu() and self.getAttributeValue(USE_GPU, True)
+ def getGpuList(self):
948
+ if not self.allowsGpu():
949
+ return []
950
+
951
+ return pwutils.getListFromRangeString(self.gpuList.get())
952
+
953
    def getOutputsSize(self):
        """ Return the number of outputs currently produced. """
        return sum(1 for _ in self.iterOutputAttributes())
+ def getOutputFiles(self):
957
+ """ Return the output files produced by this protocol.
958
+ This can be used in web to download results back.
959
+ """
960
+ # By default return the output file of each output attribute
961
+ s = set()
962
+
963
+ for _, attr in self.iterOutputAttributes():
964
+ s.update(attr.getFiles())
965
+
966
+ return s
967
+
968
+ def getOutputSuffix(self, outputPrefix):
969
+ """ Return the suffix to be used for a new output.
970
+ For example: output3DCoordinates7.
971
+ It should take into account previous outputs
972
+ and number with a higher value.
973
+ """
974
+ maxCounter = -1
975
+ for attrName, _ in self.iterOutputAttributes():
976
+ suffix = attrName.replace(outputPrefix, '')
977
+ try:
978
+ counter = int(suffix)
979
+ except:
980
+ counter = 1 # when there is not number assume 1
981
+ maxCounter = max(counter, maxCounter)
982
+
983
+ return str(maxCounter + 1) if maxCounter > 0 else '' # empty if not output
984
+
985
+ def getNextOutputName(self, outputPrefix):
986
+ """Return the name to be used for a new output."""
987
+ return outputPrefix + self.getOutputSuffix(outputPrefix)
988
+
989
    def copyDefinitionAttributes(self, other):
        """ Copy definition attributes to other protocol.

        NOTE(review): copyAttributes(other, name) appears to read the value
        FROM `other` INTO self -- confirm direction against
        Object.copyAttributes before relying on this docstring.
        """
        for paramName, _ in self.iterDefinitionAttributes():
            self.copyAttributes(other, paramName)
+ def _createVarsFromDefinition(self, **kwargs):
995
+ """ This function will setup the protocol instance variables
996
+ from the Protocol Class definition, taking into account
997
+ the variable type and default values.
998
+ """
999
+ if hasattr(self, '_definition'):
1000
+ for paramName, param in self._definition.iterParams():
1001
+ # Create the var with value coming from kwargs or from
1002
+ # the default param definition
1003
+ try:
1004
+ value = kwargs.get(paramName, param.default.get())
1005
+ var = param.paramClass(value=value)
1006
+ setattr(self, paramName, var)
1007
+ except Exception as e:
1008
+ raise ValueError("Can't create parameter '%s' and set it to %s" %
1009
+ (paramName, value)) from e
1010
+ else:
1011
+ print("FIXME: Protocol '%s' has not DEFINITION"
1012
+ % self.getClassName())
1013
+
1014
    def _getFileName(self, key, **kwargs):
        """ This function will retrieve filenames given a key and some
        keywords arguments. The __filenamesDict attribute should be
        updated with templates that accept the given keys.

        Raises KeyError when the key has no registered template.
        """
        # %-interpolation of the template with the given keyword values
        return self.__filenamesDict[key] % kwargs
    def _updateFilenamesDict(self, fnDict):
        """ Update the dictionary with templates that will be used
        by the _getFileName function.

        :param fnDict: dict mapping key -> %-style filename template.
        """
        self.__filenamesDict.update(fnDict)
    def _store(self, *objs):
        """ Stores objects of the protocol using the mapper.
        If not objects are passed, the whole protocol is stored.

        No-op when the protocol has no mapper attached.
        """
        if self.mapper is not None:
            with self._lock:  # _lock is now a Rlock object (recursive locks)
                if len(objs) == 0:
                    # Store the whole protocol
                    self.mapper.store(self)
                else:
                    for obj in objs:
                        self.mapper.store(obj)
                self.mapper.commit()
    def _insertChild(self, key, child):
        """ Insert a new child not stored previously.
        If stored previously, _store should be used.
        The child will be set as self.key attribute
        """
        try:
            setattr(self, key, child)
            # Only persist when the protocol itself already has an id
            if self.hasObjId():
                self.mapper.insertChild(self, key, child)
        except Exception as ex:
            # Add context before re-raising: which child failed and why
            print("Error with child '%s', value=%s, type=%s"
                  % (key, child, type(child)))
            raise ex
    def _deleteChild(self, key, child):
        """ Delete a child from the mapper.

        :param key: unused; kept for symmetry with _insertChild.
        :param child: object to delete.
        """
        self.mapper.delete(child)
    def _insertAllSteps(self):
        """ Define all the steps that will be executed.

        To be overridden by subclasses; base implementation does nothing.
        """
        pass
    def _defineParams(self, form):
        """ Define the input parameters that will be used.

        To be overridden by subclasses; base implementation does nothing.

        Params:
            form: this is the form to be populated with sections and params.
        """
        pass
    def __insertStep(self, step, prerequisites=None):
        """ Insert a new step in the list.

        :param step: Step instance to register.
        :param prerequisites: a single integer or a list with the steps index that need to be done
            previous to the current one.
        :return: the (1-based) index assigned to the inserted step.
        """

        if prerequisites is None:
            if len(self._steps):
                # By default add the previous step as prerequisite
                # (len before append == index of the last inserted step)
                step.addPrerequisites(len(self._steps))
        else:
            # Allow passing just an id
            if not isinstance(prerequisites, list):
                prerequisites = [prerequisites]

            step.addPrerequisites(*prerequisites)

        self._steps.append(step)
        self._newSteps = True
        # Setup and return step index (1-based: len after append)
        step.setIndex(len(self._steps))

        return step.getIndex()
    def setRunning(self):
        """ Do not reset the init time in RESUME_MODE"""
        # super().setRunning() resets the start time; remember it first
        previousStart = self.initTime.get()
        super().setRunning()
        if self.getRunMode() == MODE_RESUME and previousStart is not None:
            # Resuming: keep the original start timestamp
            self.initTime.set(previousStart)
        else:
            # Fresh start: reset the accumulated cpu time as well
            self._cpuTime.set(0)
    def setAborted(self):
        """ Abort the protocol, finalize the steps and close all open sets"""
        try:
            super().setAborted()
            # Mark any step still RUNNING in the steps db as aborted too
            self._updateSteps(lambda step: step.setAborted(), where="status='%s'" % STATUS_RUNNING)
            self._closeOutputSet()
        except Exception as e:
            # Best effort: the abort path itself must never crash
            print("An error occurred aborting the protocol (%s)" % e)
    def setFailed(self, msg):
        """ Set the run failed and close all open sets.

        :param msg: error message describing the failure.
        """
        super().setFailed(msg)
        self._closeOutputSet()
    def _finalizeStep(self, status, msg=None):
        """ Closes the step and setting up the protocol process id """
        super()._finalizeStep(status, msg)
        self._closeOutputSet()
        # Clear the pid: the protocol process is no longer running
        self._pid.set(0)
    def _updateSteps(self, updater, where="1"):
        """Set the status of all steps
        :parameter updater callback/lambda receiving a step and editing it inside
        :parameter where condition to filter the set with. Defaults to "1"
            (SQL for "all rows")."""
        stepsSet = StepSet(filename=self.getStepsFile())
        for step in stepsSet.iterItems(where=where):
            updater(step)
            stepsSet.update(step)
        stepsSet.write()
        stepsSet.close()  # Close the connection
    def getPath(self, *paths):
        """ Same as _getPath but without underscore. """
        return self._getPath(*paths)
    def _getPath(self, *paths):
        """ Return a path inside the workingDir. """
        return os.path.join(self.workingDir.get(), *paths)
    def _getExtraPath(self, *paths):
        """ Return a path inside the extra folder. """
        return self._getPath("extra", *paths)
    def _getTmpPath(self, *paths):
        """ Return a path inside the tmp folder. """
        return self._getPath("tmp", *paths)
    def _getLogsPath(self, *paths):
        """ Return a path inside the logs folder. """
        return self._getPath("logs", *paths)
    def _getRelPath(self, *path):
        """ Return a relative path from the workingDir. """
        return os.path.relpath(self._getPath(*path), self.workingDir.get())
    def _getRelPathExecutionDir(self, *path):
        """ Return a relative path from the projdir. """
        # TODO: there must be a better way to resolve the project dir than
        # climbing two levels up from workingDir (Runs/<run>/ layout assumed)
        return os.path.relpath(self._getPath(*path), os.path.dirname(os.path.dirname(self.workingDir.get())))
+ def _getBasePath(self, path):
1162
+ """ Take the basename of the path and get the path
1163
+ relative to working dir of the protocol.
1164
+ """
1165
+ return self._getPath(os.path.basename(path))
1166
+
1167
+ def _insertFunctionStep(self, func, *funcArgs, prerequisites=None, wait=False, interactive=False, needsGPU=True):
1168
+ """
1169
+ Params:
1170
+ func: the function itself or, optionally, the name (string) of the function to be run in the Step.
1171
+ *funcArgs: the variable list of arguments to pass to the function.
1172
+ **kwargs: see __insertStep
1173
+ """
1174
+ if isinstance(func, str):
1175
+ # Get the function give its name
1176
+ func = getattr(self, func, None)
1177
+
1178
+ # Ensure the protocol instance have it and is callable
1179
+ if not func:
1180
+ raise Exception("Protocol._insertFunctionStep: '%s' function is "
1181
+ "not member of the protocol" % func)
1182
+ if not callable(func):
1183
+ raise Exception("Protocol._insertFunctionStep: '%s' is not callable"
1184
+ % func)
1185
+ step = FunctionStep(func, func.__name__, *funcArgs, wait=wait, interactive=interactive, needsGPU=needsGPU)
1186
+
1187
+ return self.__insertStep(step,prerequisites)
1188
+
1189
+ def _insertRunJobStep(self, progName, progArguments, resultFiles=[],
1190
+ **kwargs):
1191
+ """ Insert an Step that will simple call runJob function
1192
+ **args: see __insertStep
1193
+ """
1194
+ return self._insertFunctionStep('runJob', progName, progArguments,
1195
+ **kwargs)
1196
+
1197
    def _insertCopyFileStep(self, sourceFile, targetFile, **kwargs):
        """ Shortcut function to insert a step for copying a file to a destiny.

        NOTE(review): kwargs is forwarded both to FunctionStep and to
        __insertStep; __insertStep only accepts 'prerequisites', so other
        keywords would raise a TypeError there -- confirm intended usage.
        """
        step = FunctionStep(pwutils.copyFile, 'copyFile', sourceFile,
                            targetFile,
                            **kwargs)
        return self.__insertStep(step, **kwargs)
    def _enterDir(self, path):
        """ Enter into a new directory path and store the current path.
        The current path will be used in _leaveDir, but nested _enterDir
        are not allowed since self._currentDir is overwritten.
        """
        # Remember where we came from for _leaveDir
        self._currentDir = os.getcwd()
        os.chdir(path)
        if self._log:
            self._log.info("Entered into dir: cd '%s'" % path)
    def _leaveDir(self):
        """ This method should be called after a call to _enterDir
        to return to the previous location.
        """
        os.chdir(self._currentDir)
        if self._log:
            self._log.info("Returned to dir: cd '%s'" % self._currentDir)
    def _enterWorkingDir(self):
        """ Change to the protocol working dir. """
        self._enterDir(self.workingDir.get())
    def _leaveWorkingDir(self):
        """ This function make sense to use in conjunction
        with _enterWorkingDir to go back to execution path.
        """
        self._leaveDir()
    def continueFromInteractive(self):
        """ TODO: REMOVE this function.
        Check if there is an interactive step and set
        as finished, this is used now mainly in picking,
        but we should remove this since is weird for users.
        """
        if os.path.exists(self.getStepsFile()):
            stepsSet = StepSet(filename=self.getStepsFile())
            for step in stepsSet:
                if step.getStatus() == STATUS_INTERACTIVE:
                    step.setStatus(STATUS_FINISHED)
                    stepsSet.update(step)
                    # Only the first interactive step is released
                    break
            stepsSet.write()
            stepsSet.close()  # Close the connection
+ def loadSteps(self):
1249
+ """ Load the Steps stored in the steps.sqlite file.
1250
+ """
1251
+ prevSteps = []
1252
+
1253
+ if os.path.exists(self.getStepsFile()):
1254
+ stepsSet = StepSet(filename=self.getStepsFile())
1255
+ for step in stepsSet:
1256
+ prevSteps.append(step.clone())
1257
+ stepsSet.close() # Close the connection
1258
+ return prevSteps
1259
+
1260
    def _insertPreviousSteps(self):
        """ Insert steps of previous execution.
        It can be used to track previous steps done for
        protocol that allow some kind of continue (such as ctf estimation).
        """
        for step in self.loadSteps():
            self.__insertStep(step, )
    def __updateDoneSteps(self):
        """ From a previous run, compare self._steps and self._prevSteps
        to find which steps we need to execute, skipping successful done
        and not changed steps. Steps that needs to be done, will be deleted
        from the previous run storage.

        :return: number of steps that can be skipped (already done).
        """
        doneSteps = 0
        if self.runMode == MODE_RESTART:
            # Restart: nothing from the previous run is reusable
            self._prevSteps = []
            return doneSteps

        self._prevSteps = self.loadSteps()

        # Only positions present in both lists can possibly be reused
        n = min(len(self._steps), len(self._prevSteps))
        self.debug("len(steps) %s len(prevSteps) %s "
                   % (len(self._steps), len(self._prevSteps)))

        for i in range(n):
            newStep = self._steps[i]
            oldStep = self._prevSteps[i]
            # Re-run when the old step didn't finish, the step changed, or
            # its postconditions no longer hold (e.g. output files gone)
            if (not oldStep.isFinished() or newStep != oldStep
                    or not oldStep._postconditions()):
                if pw.Config.debugOn():
                    self.info("Rerunning step %d" % i)
                    if not oldStep.isFinished():
                        self.info(" Old step: %s, args: %s was not finished"
                                  % (oldStep.funcName, oldStep.argsStr))
                    elif newStep != oldStep:
                        self.info(" New step: %s, args: %s is different"
                                  % (newStep.funcName, newStep.argsStr))
                    elif not oldStep._postconditions():
                        self.info(" Old step: %s, args: %s postconditions were not met"
                                  % (oldStep.funcName, oldStep.argsStr))

            else:
                doneSteps += 1
                # If the step has not changed and is properly finished, it is copied to the new steps so it is not
                # executed again
                newStep.copy(oldStep)

        return doneSteps
    def _storeSteps(self):
        """ Store the new steps list that can be retrieved
        in further execution of this protocol.
        """
        stepsFn = self.getStepsFile()

        self._stepsSet = StepSet(filename=stepsFn)
        self._stepsSet.setStore(False)
        self._stepsSet.clear()  # drop steps persisted by a previous run

        for step in self._steps:
            step.cleanObjId()  # let the set assign fresh ids
            # The protocol becomes interactive if any of its steps is
            self.setInteractive(self.isInteractive() or step.isInteractive())
            self._stepsSet.append(step)

        self._stepsSet.write()
    def __updateStep(self, step):
        """ Store a given step and write changes. """
        self._stepsSet.update(step)
        self._stepsSet.write()
    def _stepStarted(self, step):
        """This function will be called whenever an step
        has started running.
        """
        self.info(pwutils.magentaStr("STARTED") + ": %s, step %d, time %s" %
                  (step.funcName.get(), step._index, step.initTime.datetime()),
                  extra=getExtraLogInfo("PROTOCOL", STATUS.START,
                                        project_name=self.getProject().getName(),
                                        prot_id=self.getObjId(),
                                        prot_name=self.getClassName(),
                                        step_id=step._index))
        # Persist the step's new status/start time immediately
        self.__updateStep(step)
    def _stepFinished(self, step):
        """This function will be called whenever an step
        has finished its run.

        :return: True when execution should continue with further steps.
        """
        doContinue = True
        if step.isInteractive():
            # Stop and wait for user interaction
            doContinue = False
        elif step.isFailed():
            doContinue = False
            errorMsg = pwutils.redStr(
                "Protocol failed: " + step.getErrorMessage())
            self.setFailed(errorMsg)
            self.error(errorMsg)
        self.lastStatus = step.getStatus()

        self.__updateStep(step)
        self._stepsDone.increment()
        # Accumulate the elapsed time of the finished step
        self._cpuTime.set(self._cpuTime.get() + step.getElapsedTime().total_seconds())
        self._store(self._stepsDone, self._cpuTime)

        self.info(pwutils.magentaStr(step.getStatus().upper()) + ": %s, step %d, time %s"
                  % (step.funcName.get(), step._index, step.endTime.datetime()),
                  extra=getExtraLogInfo("PROTOCOL",STATUS.STOP,
                                        project_name=self.getProject().getName(),
                                        prot_id=self.getObjId(),
                                        prot_name=self.getClassName(),
                                        step_id=step._index))
        if step.isFailed() and self.modeParallel():
            # In parallel mode the executor will exit to close
            # all working threads, so we need to close
            self._endRun()
        return doContinue
    def _stepsCheck(self):
        """ Periodic hook invoked while steps run. Overriding it is what
        marks a protocol as streaming-capable (see worksInStreaming);
        the base implementation does nothing. """
        pass
    def _runSteps(self, doneSteps):
        """ Run all steps defined in self._steps.

        :param doneSteps: number of leading steps already finished in a
            previous run (they will be skipped).
        """
        self._stepsDone.set(doneSteps)
        self._numberOfSteps.set(len(self._steps))
        self.setRunning()
        # Keep the original value to set in sub-protocols
        self._originalRunMode = self.runMode.get()
        # Always set to resume, even if set to restart
        self.runMode.set(MODE_RESUME)
        self._store()

        if doneSteps == len(self._steps):
            # Everything was already done in a previous run
            self.lastStatus = STATUS_FINISHED
            self.setFinished()
            self.info("All steps seem to be FINISHED, nothing to be done.")
        else:
            self.lastStatus = self.status.get()
            self._stepsExecutor.runSteps(self._steps,
                                         self._stepStarted,
                                         self._stepFinished,
                                         self._stepsCheck,
                                         self._stepsCheckSecs)

        logger.info("*** Last status is %s " % self.lastStatus)
        self.setStatus(self.lastStatus)
        self.cleanExecutionAttributes(includeSteps=False)
        self._store(self.status)
    def __deleteOutputs(self):
        """ This function should only be used from RESTART.
        It will remove output attributes from mapper and object.
        """
        # Materialize the names first: attributes are deleted below and
        # iterOutputAttributes must not run over a mutating protocol
        attributes = [a[0] for a in self.iterOutputAttributes()]

        for attrName in attributes:
            attr = getattr(self, attrName)
            self.mapper.delete(attr)
            delattr(self, attrName)

        self._outputs.clear()
        self.mapper.store(self._outputs)
+ def findAttributeName(self, attr2Find):
1424
+ for attrName, attr in self.iterOutputAttributes():
1425
+ if attr.getObjId() == attr2Find.getObjId():
1426
+ return attrName
1427
+ return None
1428
+
1429
+ def deleteOutput(self, output):
1430
+ attrName = self.findAttributeName(output)
1431
+ self.mapper.delete(output)
1432
+ delattr(self,attrName)
1433
+ if attrName in self._outputs:
1434
+ self._outputs.remove(attrName)
1435
+ self.mapper.store(self._outputs)
1436
+ self.mapper.commit()
1437
+
1438
    def __copyRelations(self, other):
        """ This will copy relations from protocol other to self """
        # NOTE(review): not implemented; relation copying currently happens
        # inline inside Protocol.copy().
        pass
    def copy(self, other, copyId=True, excludeInputs=False):
        """
        Copies attributes between this protocol and the passed one, and
        rebuilds the relations from `other`.

        :param other: protocol instance to copy the attributes from/to.
            NOTE(review): Object.copy(self, other, ...) is invoked below --
            confirm the copy direction against Object.copy before relying
            on this docstring.
        :param copyId: True (default) copies the identifier
        :param excludeInputs: False (default). If true input attributes are excluded

        """

        # Input attributes list
        inputAttributes = []

        # If need to exclude input attributes
        if excludeInputs:
            # Get all the input attributes, to be ignored at copy():
            for key, attr in self.iterInputAttributes():
                inputAttributes.append(key)

        copyDict = Object.copy(self, other, copyId, inputAttributes)
        self._store()
        # Relations are rebuilt below from other's relations
        self.mapper.deleteRelations(self)

        for r in other.getRelations():
            rName = r['name']
            rCreator = r['parent_id']
            rParent = r[OBJECT_PARENT_ID]
            rChild = r['object_child_id']
            rParentExt = r['object_parent_extended']
            rChildExt = r['object_child_extended']

            # Remap ids of copied objects to their new counterparts
            if rParent in copyDict:
                rParent = copyDict.get(rParent).getObjId()

            if rChild in copyDict:
                rChild = copyDict.get(rChild).getObjId()

            self.mapper.insertRelationData(rName, rCreator, rParent, rChild,
                                           rParentExt, rChildExt)
    def getRelations(self):
        """ Return the relations created by this protocol. """
        return self.mapper.getRelationsByCreator(self)
    def _defineRelation(self, relName, parentObj, childObj):
        """ Insert a new relation in the mapper using self as creator.

        Pointer arguments are dereferenced: the relation is stored against
        the pointed object while keeping the pointer's extended part.
        """
        parentExt = None
        childExt = None

        if parentObj.isPointer():
            parentExt = parentObj.getExtended()
            parentObj = parentObj.getObjValue()

        if childObj.isPointer():
            childExt = childObj.getExtended()
            childObj = childObj.getObjValue()

        self.mapper.insertRelation(relName, self, parentObj, childObj,
                                   parentExt, childExt)
    def makePathsAndClean(self):
        """ Create the necessary path or clean
        if in RESTART mode.
        """
        # Clean working path if in RESTART mode
        if self.runMode == MODE_RESTART:
            self.cleanWorkingDir()
            self.__deleteOutputs()
            # Delete the relations created by this protocol
            # (delete this in both project and protocol db)
            self.mapper.deleteRelations(self)
        self.makeWorkingDir()
    def cleanWorkingDir(self):
        """
        Delete all files and subdirectories related with the protocol
        """
        # Tmp first: it may be a symlink to scratch space (see cleanTmp)
        self.cleanTmp()
        pwutils.cleanPath(self._getPath())
    def makeWorkingDir(self):
        """ Create the working dir with its extra/logs subfolders plus the
        tmp folder (or scratch link). """
        # Create workingDir, logs and extra paths
        paths = [self._getPath(), self._getExtraPath(), self._getLogsPath()]
        pwutils.makePath(*paths)
        # Create scratch if SCIPION_SCRATCH environment variable exist.
        # In other case, tmp folder is created
        pwutils.makeTmpPath(self)
    def cleanTmp(self):
        """ Delete all files and subdirectories under Tmp folder. """
        tmpFolder = self._getTmpPath()

        if os.path.islink(tmpFolder):
            # Tmp may be a symlink to a scratch folder: clean the real
            # target, then remove the link itself
            pwutils.cleanPath(os.path.realpath(tmpFolder))
            os.remove(tmpFolder)
        else:
            pwutils.cleanPath(tmpFolder)

        self._cleanExtraFiles()
    def _cleanExtraFiles(self):
        """ This method will be called when the protocol finishes correctly.
        It is the responsibility of the protocols to implement this method to make extra cleanup
        of its folders, like iterations folder and files that are not needed when finished
        """

        # Base implementation cleans nothing; subclasses should override
        logger.info("Nothing to clean up")
        logger.debug('FOR DEVELOPERS: implement Protocol._cleanExtraFiles this protocol could'
                     ' free up some space upon finishing.')
    def _run(self):
        """ Validate, (re)build the steps list and run the pending steps.

        :raises Exception: when no steps executor has been set.
        :raises ValidationException: when validate() reports errors.
        """
        # Check that a proper Steps executor have been set
        if self._stepsExecutor is None:
            raise Exception('Protocol.run: Steps executor should be set before '
                            'running protocol')
        # Check the parameters are correct
        errors = self.validate()
        if len(errors):
            raise ValidationException(
                'Protocol has validation errors:\n' + '\n'.join(errors))

        self._insertAllSteps()  # Define steps for execute later
        # Find at which step we need to start
        doneSteps = self.__updateDoneSteps()
        # self.info(" Starting at step: %d" % (startIndex + 1))
        self._storeSteps()
        self.info(" Running steps ")
        self._runSteps(doneSteps)
    def _getEnviron(self):
        """ This function should return an environ variable
        that will be used when running new programs.
        By default, the protocol will use the one defined
        in the package that it belongs or None.
        """
        return self.getClassPackage().Plugin.getEnviron()
    def runJob(self, program, arguments, **kwargs):
        """ Run an external program through the steps executor.

        In serial mode the protocol-level MPI/threads values are forwarded
        by default; in parallel mode each job defaults to 1 MPI / 1 thread
        (parallelism is handled at the steps level instead).
        """
        if self.stepsExecutionMode == STEPS_SERIAL:
            kwargs['numberOfMpi'] = kwargs.get('numberOfMpi',
                                               self.numberOfMpi.get())
            kwargs['numberOfThreads'] = kwargs.get('numberOfThreads',
                                                   self.numberOfThreads.get())
        else:
            kwargs['numberOfMpi'] = kwargs.get('numberOfMpi', 1)
            kwargs['numberOfThreads'] = kwargs.get('numberOfThreads', 1)
        if 'env' not in kwargs:
            # Default to the plugin's environment
            kwargs['env'] = self._getEnviron()

        self._stepsExecutor.runJob(self._log, program, arguments, **kwargs)
    def run(self):
        """ Before calling this method, the working dir for the protocol
        to run should exist.

        Logs a banner with execution context (host, PID or queue job ids,
        pyworkflow/plugin versions, working dir, run mode), then delegates
        the actual step execution to Step.run() and finally stores state
        and cleans up via _endRun().
        """
        try:
            action = "RUNNING" if self.runMode == MODE_RESTART else "RESUMING"
            self.info(pwutils.greenStr('%s PROTOCOL -----------------' % action))
            self.info("Protocol starts", extra=getExtraLogInfo("PROTOCOL", STATUS.START,
                                                               project_name=self.getProject().getName(),
                                                               prot_id=self.getObjId(),
                                                               prot_name=self.getClassName()))

            self.setHostFullName(pwutils.getHostFullName())
            self.info('Hostname: %s' % self.getHostFullName())

            # Store the full machine name where the protocol is running
            # and also its PID
            if not self.useQueueForProtocol():  # Take as reference the pID
                self.setPid(os.getpid())
                self.info('PID: %s' % self.getPid())
            else:  # Take as reference the jobID
                self.info('Executing through the queue system')
                self.info('JOBID: %s' % self.getJobIds())

            self.info('pyworkflow: %s' % pw.__version__)
            plugin = self.getPlugin()
            self.info('plugin: %s - %s' % (plugin.getName(), plugin.getUrl()))
            package = self.getClassPackage()
            if hasattr(package, "__version__"):
                self.info('plugin v: %s%s' %(package.__version__, ' (devel)' if plugin.inDevelMode() else '(production)'))
            try:
                self.info('plugin binary v: %s' % plugin.getActiveVersion())
            except Exception as e:
                logger.error("Coudn't get the active version of the binary. This may be cause by a variable in the config"
                             " file with a missing - in it and the protocol to fail.", exc_info=e)
            self.info('currentDir: %s' % os.getcwd())
            self.info('workingDir: %s' % self.workingDir)
            self.info('runMode: %s' % MODE_CHOICES[self.runMode.get()])

            if self.modeSerial():
                self.info("Serial execution")
            else:
                self.info("Scipion threads: %d" % self.getScipionThreads())

            # MPI/threads info is best-effort only; a failure here must not
            # abort the run.
            try:
                self.info('binary MPI: %d' % self.numberOfMpi)
                self.info('binary Threads: %d' % self.getBinThreads())
            except Exception as e:
                self.info(' * Cannot get information about MPI/threads (%s)' % e)
        # Something went wrong and at this point status is launched. We mark it as failed.
        except Exception as e:
            logger.error("Couldn't start the protocol." , exc_info=e)
            self.setFailed(str(e))
            # self._store(self.status, self.getError())
            self._endRun()
            return

        Step.run(self)
        # if self.isFailed():
        #     self._store()
        self._endRun()
1653
+
1654
    def _endRun(self):
        """ Print some ending message and close some files.

        Persists all protocol attributes, optionally cleans the tmp folder
        (skipped on failure or when SCIPION_DEBUG_NOCLEAN is set) and logs
        the final status banner.
        """
        self._store()  # Store all protocol attributes
        # self._store(self.summaryVar)
        # self._store(self.methodsVar)
        # self._store(self.endTime)

        if pwutils.envVarOn(pw.SCIPION_DEBUG_NOCLEAN):
            self.warning('Not cleaning temp folder since '
                         '%s is set to True.' % pw.SCIPION_DEBUG_NOCLEAN)
        elif not self.isFailed():
            # Keep tmp files around on failure to ease debugging.
            self.info('Cleaning temp folder....')
            self.cleanTmp()

        self.info(pwutils.greenStr('------------------- PROTOCOL ' +
                                   self.getStatusMessage().upper()),
                  extra=getExtraLogInfo("PROTOCOL",STATUS.STOP,
                                        project_name=self.getProject().getName(),
                                        prot_id=self.getObjId(),
                                        prot_name=self.getClassName()))
1674
+
1675
    def getLogPaths(self):
        """Return the three log paths: [stdout, stderr, schedule]."""
        return [self.getStdoutLog(),self.getStderrLog() , self.getScheduleLog()]
1677
+
1678
    def getStdoutLog(self):
        """Return the path of the run.stdout log under the logs folder."""
        return self._getLogsPath("run.stdout")
1680
+
1681
    def getStderrLog(self):
        """Return the path of the run.stderr log under the logs folder."""
        return self._getLogsPath('run.stderr')
1683
+
1684
    def getScheduleLog(self):
        """Return the path of the schedule.log under the logs folder."""
        return self._getLogsPath('schedule.log')
1686
+
1687
    def getSteps(self):
        """ Return the in-memory list of Step objects of this protocol.
        (Note: unlike getStepsFile, this does NOT return a file path.) """
        return self._steps
1690
+
1691
    def getStepsFile(self):
        """ Return the steps.sqlite file under logs directory. """
        return self._getLogsPath('steps.sqlite')
1694
+
1695
+
1696
+ def _addChunk(self, txt, fmt=None):
1697
+ """
1698
+ Add text txt to self._buffer, with format fmt.
1699
+ fmt can be a color (like 'red') or a link that looks like 'link:url'.
1700
+ """
1701
+ # Make the text html-safe first.
1702
+ for x, y in [('&', 'amp'), ('<', 'lt'), ('>', 'gt')]:
1703
+ txt = txt.replace(x, '&%s;' % y)
1704
+
1705
+ if fmt is None:
1706
+ self._buffer += txt
1707
+ elif fmt.startswith('link:'):
1708
+ url = fmt[len('link:'):]
1709
+ # Add the url in the TWiki style
1710
+ if url.startswith('http://'):
1711
+ self._buffer += '[[%s][%s]]' % (url, txt)
1712
+ # Web does not exist, webtools must find a solution for this case.
1713
+ # else:
1714
+ # from pyworkflow.web.pages import settings as django_settings
1715
+ # absolute_url = django_settings.ABSOLUTE_URL
1716
+ # self._buffer += '[[%s/get_log/?path=%s][%s]]' % (absolute_url,
1717
+ # url, txt)
1718
+ else:
1719
+ self._buffer += '<font color="%s">%s</font>' % (fmt, txt)
1720
+
1721
    def getLogsAsStrings(self):
        """Render every log file as a (wiki-formatted) string.

        Returns a list parallel to getLogPaths(); missing files yield a
        'File ... does not exist' placeholder string.
        """
        outputs = []
        for fname in self.getLogPaths():
            if pwutils.exists(fname):
                # renderTextFile feeds chunks through _addChunk, which
                # accumulates into self._buffer.
                self._buffer = ''
                pwutils.renderTextFile(fname, self._addChunk)
                outputs.append(self._buffer)
            else:
                outputs.append('File "%s" does not exist' % fname)
        return outputs
1732
+
1733
+ def getLogsLastLines(self, lastLines=None, logFile=0):
1734
+ """
1735
+ Get the last(lastLines) lines of a log file.
1736
+
1737
+ :param lastLines, if None, will try 'PROT_LOGS_LAST_LINES' env variable, otherwise 20
1738
+ :param logFile: Log file to take the lines from, default = 0 (std.out). 1 for stdErr.
1739
+ """
1740
+ if not lastLines:
1741
+ lastLines = int(os.environ.get('PROT_LOGS_LAST_LINES', 20))
1742
+
1743
+ # Get stdout
1744
+ stdoutFn =self.getLogPaths()[logFile]
1745
+
1746
+ if not os.path.exists(stdoutFn):
1747
+ return []
1748
+
1749
+ with open(stdoutFn, 'r') as stdout:
1750
+
1751
+ iterlen = lambda it: sum(1 for _ in it)
1752
+ numLines = iterlen(stdout)
1753
+
1754
+ lastLines = min(lastLines, numLines)
1755
+ sk = numLines - lastLines
1756
+ sk = max(sk, 0)
1757
+
1758
+ stdout.seek(0, 0)
1759
+ output = [l.strip('\n') for k, l in enumerate(stdout)
1760
+ if k >= sk]
1761
+ return output
1762
+
1763
    def warning(self, message, redirectStandard=True):
        """Log a warning message. `redirectStandard` is unused, kept for
        backward compatibility with older callers."""
        self._log.warning(message)
1765
+
1766
    def info(self, message, extra=None):
        """Log an info message; `extra` is forwarded as logging extra context."""
        self._log.info(message, extra= extra)
1768
+
1769
    def error(self, message, redirectStandard=True):
        """Log an error message. `redirectStandard` is unused, kept for
        backward compatibility with older callers."""
        self._log.error(message)
1771
+
1772
    def debug(self, message):
        """Log a debug message through the protocol logger."""
        self._log.debug(message)
1774
+
1775
    def getWorkingDir(self):
        """Return the protocol working directory (plain string)."""
        return self.workingDir.get()
1777
+
1778
    def setWorkingDir(self, path):
        """Set the protocol working directory."""
        self.workingDir.set(path)
1780
+
1781
    def setMapper(self, mapper):
        """ Set a new mapper for the protocol to persist state. """
        self.mapper = mapper
1784
+
1785
    def getMapper(self):
        """Return the mapper used to persist protocol state."""
        return self.mapper
1787
+
1788
    def getDbPath(self):
        """Return the path of the protocol run.db under the logs folder."""
        return self._getLogsPath('run.db')
1790
+
1791
    def setStepsExecutor(self, executor=None):
        """Attach the executor that will run the steps.

        :param executor: a StepExecutor (or subclass); when None a plain
            StepExecutor bound to this protocol's host config is created.
        """
        if executor is None:
            executor = StepExecutor(self.getHostConfig())

        self._stepsExecutor = executor
        self._stepsExecutor.setProtocol(self)  # executor needs the protocol to store the jobs Ids submitted to a queue
1797
+
1798
    def getExecutor(self):
        """Return the executor associated. This must be used only during protocol execution (steps code).
        In "design/GUI time" is not set."""
        return self._stepsExecutor
1802
+
1803
+ def getFiles(self):
1804
+ resultFiles = set()
1805
+ for paramName, _ in self.getDefinition().iterPointerParams():
1806
+ # Get all self attribute that are pointers
1807
+ attrPointer = getattr(self, paramName)
1808
+ obj = attrPointer.get() # Get object pointer by the attribute
1809
+ if hasattr(obj, 'getFiles'):
1810
+ resultFiles.update(obj.getFiles()) # Add files if any
1811
+ return resultFiles | pwutils.getFiles(self.workingDir.get())
1812
+
1813
    def getHostName(self):
        """ Get the execution host name.
        This value is only the key of the host in the configuration file.
        """
        return self.hostName.get()
1818
+
1819
    def setHostName(self, hostName):
        """ Set the execution host name (the host key in the config file) """
        self.hostName.set(hostName)
1822
+
1823
    def getHostFullName(self):
        """ Return the full machine name where the protocol is running. """
        return self.hostFullName.get()
1826
+
1827
    def setHostFullName(self, hostFullName):
        """Store the full machine name where the protocol is running."""
        self.hostFullName.set(hostFullName)
1829
+
1830
    def getHostConfig(self):
        """ Return the configuration host. """
        return self.hostConfig
1833
+
1834
    def setHostConfig(self, config):
        """Attach the host configuration object (not persisted)."""
        self.hostConfig = config
        # Never store the host config as part of the protocol, it is kept
        # in the configuration information, the hostname is enough
        self.hostConfig.setStore(False)
1839
+
1840
    def getJobIds(self):
        """ Return an iterable list of jobs Ids associated to a running protocol. """
        return self._jobId
1843
+
1844
    def setJobId(self, jobId):
        """Reset the job-id list to contain only this first active job."""
        self._jobId.clear()
        self.appendJobId(jobId)
1848
+
1849
    def setJobIds(self, jobIds):
        """Replace the internal list of active job ids with `jobIds`."""
        self._jobId = jobIds
1852
+
1853
    def appendJobId(self, jobId):
        """Append an active job id to the list."""
        self._jobId.append(jobId)
1856
    def removeJobId(self, jobId):
        """Remove an inactive job id from the list."""
        self._jobId.remove(jobId)
1859
+
1860
    def getPid(self):
        """Return the stored process id of the running protocol."""
        return self._pid.get()
1862
+
1863
    def setPid(self, pid):
        """Store the process id of the running protocol."""
        self._pid.set(pid)
1865
+
1866
+ def getRunName(self):
1867
+ runName = self.getObjLabel().strip()
1868
+ if not len(runName):
1869
+ runName = self.getDefaultRunName()
1870
+ return runName
1871
+
1872
    def getDefaultRunName(self):
        """Return the fallback run name: '<ClassName>.<objectId>'."""
        return '%s.%s' % (self.getClassName(), self.strId())
1874
+
1875
    @classmethod
    def getClassPackage(cls):
        """ Return the package module to which this protocol belongs.
        This function will only work, if for the given Domain, the
        method Domain.getProtocols() has been called once. After calling
        this method the protocol classes are registered with it Plugin
        and Domain info.
        """
        return cls._package
1884
+
1885
    @classmethod
    def getClassPlugin(cls):
        """Deprecated alias of getPlugin (kept for backward compatibility)."""
        logger.warning("Deprecated on 04-2023. Use Protocol.getPlugin instead.")
        return cls.getPlugin()
1890
+
1891
    @classmethod
    def getPlugin(cls):
        """Return the Plugin object this protocol class was registered with."""
        return cls._plugin
1894
    @classmethod
    def getClassPackageName(cls):
        """Return the package module name, or "orphan" when unregistered."""
        return cls.getClassPackage().__name__ if cls.getClassPackage() else "orphan"
1897
+
1898
    @classmethod
    def getClassDomain(cls):
        """ Return the Domain class where this Protocol class is defined. """
        return pw.Config.getDomain()
1902
+
1903
+ @classmethod
1904
+ def getPluginLogoPath(cls):
1905
+ package = cls.getClassPackage()
1906
+ logo = getattr(package, '_logo', None)
1907
+ if logo:
1908
+ logoPath = (pw.findResource(logo) or
1909
+ os.path.join(os.path.abspath(os.path.dirname(package.__file__)), logo))
1910
+ else:
1911
+ logoPath = None
1912
+
1913
+ return logoPath
1914
+
1915
+ @classmethod
1916
+ def validatePackageVersion(cls, varName, errors):
1917
+ """
1918
+ Function to validate the package version specified in
1919
+ configuration file ~/.config/scipion/scipion.conf is among the available
1920
+ options and it is properly installed.
1921
+
1922
+ :param package: the package object (ej: eman2 or relion). Package should contain the
1923
+ following methods: getVersion(), getSupportedVersions()
1924
+ :param varName: the expected environment var containing the path (and version)
1925
+ :param errors: list of strings to add errors if found
1926
+
1927
+ """
1928
+ package = cls.getClassPackage()
1929
+ packageName = cls.getClassPackageName()
1930
+ varValue = package.Plugin.getVar(varName)
1931
+ versions = ','.join(package.Plugin.getSupportedVersions())
1932
+
1933
+ errorMsg = None
1934
+
1935
+ if not package.Plugin.getActiveVersion():
1936
+ errors.append("We could not detect *%s* version. " % packageName)
1937
+ errorMsg = "The path value should contains a valid version (%s)." % versions
1938
+ elif not os.path.exists(varValue):
1939
+ errors.append("Path of %s does not exists." % varName)
1940
+ errorMsg = "Check installed packages and versions with command:\n "
1941
+ errorMsg += "*scipion install --help*"
1942
+
1943
+ if errorMsg:
1944
+ errors.append("%s = %s" % (varName, varValue))
1945
+ errors.append(
1946
+ "Please, modify %s value in the configuration file:" % varName)
1947
+ errors.append("*~/.config/scipion/scipion.conf*")
1948
+ errors.append(errorMsg)
1949
+ errors.append("After fixed, you NEED TO RESTART THE PROJECT WINDOW")
1950
+
1951
+ @classmethod
1952
+ def getClassLabel(cls, prependPackageName=True):
1953
+ """ Return a more readable string representing the protocol class """
1954
+ label = cls.__dict__.get('_label', cls.__name__)
1955
+ if prependPackageName:
1956
+ try:
1957
+ label = "%s - %s" % (cls.getPlugin().getName(), label)
1958
+ except Exception as e:
1959
+ label = "%s -%s" % ("missing", label)
1960
+ logger.error("Couldn't get the plugin name for %s" % label, exc_info=e)
1961
+ return label
1962
+
1963
    @classmethod
    def isDisabled(cls):
        """ Return True if this Protocol is disabled.
        Disabled protocols will not be offered in the available protocols."""
        return False
1968
+
1969
    @classmethod
    def isBase(cls):
        """ Return True if this Protocol is a base class.
        Base classes should be marked with _label = None.
        """
        # Only looks at this class' own __dict__, so a _label inherited
        # from a parent does not make a subclass non-base.
        return cls.__dict__.get('_label', None) is None
1975
+
1976
    def getSubmitDict(self):
        """ Return a dictionary with the necessary keys to
        launch the job to a queue system.

        User-selected queue parameters (from the form) are merged last and
        therefore override the defaults computed here.
        """
        queueName, queueParams = self.getQueueParams()
        hc = self.getHostConfig()

        scipion_project = "SCIPION_PROJECT" if self.getProject() is None else self.getProject().getShortName()

        d = {'JOB_NAME': self.strId(),
             'JOB_QUEUE': queueName,
             'JOB_NODES': max([1,self.numberOfMpi.get()]),
             'JOB_THREADS': max([1,self.numberOfThreads.get()]),
             'JOB_CORES': max([1,self.numberOfMpi.get() * self.numberOfThreads.get()]),
             'JOB_HOURS': 72,
             'GPU_COUNT': len(self.getGpuList()),
             QUEUE_FOR_JOBS: 'N',
             PLUGIN_MODULE_VAR: self.getPlugin().getName(),
             'SCIPION_PROJECT': scipion_project,
             'SCIPION_PROTOCOL': self.getRunName()
             }

        # Criteria in HostConfig.load to load or not QUEUE variables
        if hc.getQueueSystem().hasName():
            job_logs = self._getLogsPath(hc.getSubmitPrefix() + self.strId())
            d['JOB_SCRIPT'] = job_logs + '.job'
            d['JOB_LOGS'] = job_logs
            d['JOB_NODEFILE'] = os.path.abspath(job_logs +'.nodefile')

        d.update(queueParams)
        return d
2007
+
2008
    def useQueue(self):
        """ Return True if the protocol should be launched through a queue. """
        return self._useQueue.get()
2011
+
2012
    def useQueueForSteps(self):
        """ This function will return True if the protocol has been set
        to be launched through a queue by steps """
        return self.useQueue() and (self.getSubmitDict()[QUEUE_FOR_JOBS] == "Y")
2016
+
2017
    def useQueueForProtocol(self):
        """ This function will return True if the protocol has been set
        to be launched through a queue (the whole protocol, not per step) """
        return self.useQueue() and (self.getSubmitDict()[QUEUE_FOR_JOBS] != "Y")
2021
+
2022
    def getQueueParams(self):
        """Return the stored (queueName, paramsDict) pair, or ('', {}).

        NOTE: the pair is persisted through json, so after the round trip
        it comes back as a 2-item list rather than a tuple.
        """
        if self._queueParams.hasValue():
            return json.loads(self._queueParams.get())
        else:
            return '', {}
2027
+
2028
    def hasQueueParams(self):
        """Return True when queue parameters have been stored."""
        return self._queueParams.hasValue()
2030
+
2031
    def setQueueParams(self, queueParams):
        """Persist queue parameters (json-serialized)."""
        self._queueParams.set(json.dumps(queueParams))
2033
+
2034
    @property
    def numberOfSteps(self):
        """Total number of steps registered for this run (0 when unset)."""
        return self._numberOfSteps.get(0)
2037
+
2038
    @property
    def stepsDone(self):
        """ Return the number of steps executed. """
        return self._stepsDone.get(0)
2042
+
2043
    @property
    def cpuTime(self):
        """ Return the sum of all durations of the finished steps"""
        return self._cpuTime.get()
2047
+
2048
    def updateSteps(self):
        """ After the steps list is modified, this methods will update steps
        information. It will save the steps list and also the number of steps.
        """
        self._storeSteps()
        self._numberOfSteps.set(len(self._steps))
        self._store(self._numberOfSteps)
        # Flag consumed elsewhere to detect pending step-list changes.
        self._newSteps = False
2056
+
2057
    def getStatusMessage(self):
        """ Return the status string and if running the steps done.
        """
        msg = self.getStatus()
        # Append progress also for aborted/failed runs, not just running ones.
        if self.isRunning() or self.isAborted() or self.isFailed():
            msg += " (done %d/%d)" % (self.stepsDone, self.numberOfSteps)

        return msg
2065
+
2066
    def getRunMode(self):
        """ Return the mode of execution, either:
        MODE_RESTART or MODE_RESUME. """
        return self.runMode.get()
2070
+
2071
+ def hasSummaryWarnings(self):
2072
+ return len(self.summaryWarnings) != 0
2073
+
2074
    def addSummaryWarning(self, warningDescription):
        """Appends the warningDescription param to the list of summaryWarnings.
        Will be printed in the protocol summary.

        :return: the updated summaryWarnings list.
        """
        self.summaryWarnings.append(warningDescription)
        return self.summaryWarnings
2079
+
2080
    def checkSummaryWarnings(self):
        """ Checks for warnings that we want to tell the user about by adding a
        warning sign to the run box and a description to the run summary.
        List of warnings checked:
        1. If the folder for this protocol run exists.
        """
        # A saved (never launched) protocol legitimately has no folder yet.
        if not self.isSaved() and not os.path.exists(self.workingDir.get()):
            self.addSummaryWarning("*Missing run data*: The directory for this "
                                   "run is missing, so it won't be possible to "
                                   "use its outputs in other protocols.")
2090
+
2091
    def isContinued(self):
        """ Return if running in continue mode (MODE_RESUME). """
        return self.getRunMode() == MODE_RESUME
2094
+
2095
+ # Methods that should be implemented in subclasses
2096
    # Methods that should be implemented in subclasses
    def _validate(self):
        """ This function can be overwritten by subclasses.
        Used from the public validate function.

        :return: list of error strings (empty means valid).
        """
        return []
2101
+
2102
    @classmethod
    def getUrl(cls):
        """Return the plugin-provided url for this protocol class."""
        return cls.getPlugin().getUrl(cls)
2105
+
2106
    @classmethod
    def isInstalled(cls):
        """Return True when validateInstallation reports no errors."""
        # We a consider a protocol installed if there are not errors
        # from the _validateInstallation function
        return not cls.validateInstallation()
2111
+
2112
    @classmethod
    def validateInstallation(cls):
        """ Check if the installation of this protocol is correct.
        By default, we will check if the protocols' package provide a
        validateInstallation function and use it.
        Returning an empty list means that the installation is correct
        and there are not errors. If some errors are found, a list with
        the error messages will be returned.
        """
        try:
            validateFunc = getattr(cls.getClassPackage().Plugin,
                                   'validateInstallation', None)

            return validateFunc() if validateFunc is not None else []
        except Exception as e:
            # Any failure while probing the plugin is reported as an
            # installation error rather than raised.
            msg = str(e)
            msg += (" %s installation couldn't be validated. Possible cause "
                    "could be a configuration issue. Try to run scipion "
                    "config." % cls.__name__)
            print(msg)
            return [msg]
2133
+
2134
+ def validate(self):
2135
+ """ Check that input parameters are correct.
2136
+ Return a list with errors, if the list is empty, all was ok.
2137
+ """
2138
+ errors = []
2139
+ # Validate that all input pointer parameters have a value
2140
+ for paramName, param in self.getDefinition().iterParams():
2141
+ # Get all self attribute that are pointers
2142
+ attr = getattr(self, paramName)
2143
+ paramErrors = []
2144
+ condition = self.evalParamCondition(paramName)
2145
+ if attr.isPointer():
2146
+ obj = attr.get()
2147
+ if condition and obj is None and not param.allowsNull:
2148
+ paramErrors.append('cannot be EMPTY.')
2149
+ elif isinstance(attr, PointerList):
2150
+ # In this case allowsNull refers to not allowing empty items
2151
+ if not param.allowsNull:
2152
+ if len(attr) == 0:
2153
+ paramErrors.append('cannot be EMPTY.')
2154
+ # Consider empty pointers
2155
+ else:
2156
+ if any(pointer.get() is None for pointer in attr):
2157
+ paramErrors.append('Can not have EMPTY items.')
2158
+
2159
+ else:
2160
+ if condition:
2161
+ paramErrors = param.validate(attr.get())
2162
+ label = param.label.get()
2163
+ errors += ['*%s* %s' % (label, err) for err in paramErrors]
2164
+
2165
+ try:
2166
+ # Check that all ids specified in the 'Wait for' form entry
2167
+ # are valid protocol ids
2168
+ proj = self.getProject()
2169
+ for protId in self.getPrerequisites():
2170
+ try:
2171
+ prot = proj.getProtocol(int(protId))
2172
+ except Exception:
2173
+ prot = None
2174
+ if prot is None:
2175
+ errors.append('*%s* is not a valid protocol id.' % protId)
2176
+
2177
+ # Validate specific for the subclass
2178
+ installErrors = self.validateInstallation()
2179
+ if installErrors:
2180
+ errors += installErrors
2181
+ childErrors = self._validate()
2182
+ if childErrors:
2183
+ errors += childErrors
2184
+ except Exception:
2185
+ import urllib
2186
+ exceptionStr = pwutils.formatExceptionInfo()
2187
+ errors.append("Protocol validation failed. It usually happens because there are some "
2188
+ "input missing. Please check if the error message gives you any "
2189
+ "hint:\n{}".format(exceptionStr))
2190
+ return errors
2191
+
2192
    def _warnings(self):
        """ Should be implemented in subclasses. See warning. """
        return []
2195
+
2196
    def warnings(self):
        """ Return some message warnings that can be errors.
        User should approve to execute a protocol with warnings. """
        return self._warnings()
2200
+
2201
    def _summary(self):
        """ Should be implemented in subclasses. See summary. """
        return ["No summary information."]
2204
+
2205
    def summary(self):
        """ Return a summary message to provide some information to users.

        Builds on the subclass _summary(), then appends the user comment,
        the stored error (if any) and accumulated summary warnings. Any
        exception is converted into a one-item summary so the GUI never
        breaks while rendering.
        """
        try:
            baseSummary = self._summary() or ['No summary information.']

            # Tolerate subclasses returning a bare string.
            if isinstance(baseSummary, str):
                baseSummary = [baseSummary]

            if not isinstance(baseSummary, list):
                raise Exception("Developers error: _summary() is not returning "
                                "a list")

            comments = self.getObjComment()
            if comments:
                baseSummary += ['', '*COMMENTS:* ', comments]

            if self.getError().hasValue():
                baseSummary += ['', '*ERROR:*', self.getError().get()]

            if self.summaryWarnings:
                baseSummary += ['', '*WARNINGS:*']
                baseSummary += self.summaryWarnings

        except Exception as ex:
            baseSummary = [str(ex)]

        return baseSummary
2232
+
2233
+ def getFileTag(self, fn):
2234
+ return "[[%s]]" % fn
2235
+
2236
    def getObjectTag(self, objName):
        """Return a wiki 'sci-open' link for an object or attribute name.

        :param objName: either an attribute name (string) of this protocol,
            or the object itself. Pointers are dereferenced first.
        :return: '[[sci-open:<id>][<nameId>]]' or '*None*' when unresolved.
        """
        if isinstance(objName, str):
            obj = getattr(self, objName, None)
        else:
            obj = objName

        if obj is None:
            return '*None*'

        if obj.isPointer():
            obj = obj.get()  # get the pointed object
            if obj is None:
                return '*None*'

        return "[[sci-open:%s][%s]]" % (obj.getObjId(), obj.getNameId())
2251
+
2252
    def _citations(self):
        """ Should be implemented in subclasses. See citations.
        Defaults to the class-level _references list when present. """
        return getattr(self, "_references", [])
2255
+
2256
    def __getPluginBibTex(self):
        """ Return the _bibtex from the package """
        return getattr(self.getClassPackage(), "_bibtex", {})
2259
+
2260
    def _getCite(self, citeStr):
        """Return the formatted citation for a bibtex key, or a
        'not found' message when the key is unknown."""
        bibtex = self.__getPluginBibTex()
        if citeStr in bibtex:
            text = self._getCiteText(bibtex[citeStr])
        else:
            text = "Reference with key *%s* not found." % citeStr
        return text
2267
+
2268
+ def _getCiteText(self, cite, useKeyLabel=False):
2269
+ try:
2270
+
2271
+ journal = cite.get("journal", cite.get("booktitle", ""))
2272
+ doi = cite.get("doi", "").strip()
2273
+ url = cite.get("url", "").strip()
2274
+ # Get the first author surname
2275
+ if useKeyLabel:
2276
+ label = cite['ID']
2277
+ else:
2278
+ label = cite['author'].split(' and ')[0].split(',')[0].strip()
2279
+ label += ' et al., %s, %s' % (journal, cite['year'])
2280
+ if len(doi) > 0:
2281
+ text = '[[%s][%s]] ' % (doi, label)
2282
+ elif len(url) > 0:
2283
+ text = '[[%s][%s]] ' % (url, label)
2284
+ else:
2285
+ text = label.strip()
2286
+ return text
2287
+
2288
+ except Exception as ex:
2289
+ print("Error with citation: " + label)
2290
+ print(ex)
2291
+ text = "Error with citation *%s*." % label
2292
+ return text
2293
+
2294
    def __getCitations(self, citations):
        """ From the list of citations keys, obtains the full
        info from the package _bibtex dict.
        Keys missing from the bibtex dict are kept verbatim.
        """
        bibtex = self.__getPluginBibTex()
        newCitations = []
        for c in citations:
            if c in bibtex:
                newCitations.append(self._getCiteText(bibtex[c]))
            else:
                newCitations.append(c)
        return newCitations
2306
+
2307
    def __getCitationsDict(self, citationList, bibTexOutput=False):
        """ Return a dictionary with Cite keys and the citation links.

        :param bibTexOutput: when True map keys to the raw bibtex entries
            instead of formatted link text.
        """
        bibtex = self.__getPluginBibTex()
        od = OrderedDict()
        for c in citationList:
            if c in bibtex:
                if bibTexOutput:
                    od[c] = bibtex[c]
                else:
                    od[c] = self._getCiteText(bibtex[c])
            else:
                # Unknown keys map to themselves.
                od[c] = c

        return od
2321
+
2322
    def getCitations(self, bibTexOutput=False):
        """Return this protocol's citations as an ordered key->text dict."""
        return self.__getCitationsDict(self._citations() or [],
                                       bibTexOutput=bibTexOutput)
2325
+
2326
    def getPackageCitations(self, bibTexOutput=False):
        """Return the package-level (_references) citations dict."""
        refs = getattr(self.getClassPackage(), "_references", [])
        return self.__getCitationsDict(refs, bibTexOutput=bibTexOutput)
2329
+
2330
    def citations(self):
        """ Return a citation message to provide some information to users.

        Combines protocol references and package references under their
        respective headers; falls back to a 'No references provided' item.
        """
        citations = list(self.getCitations().values())
        if citations:
            citations.insert(0, '*Protocol references:* ')

        packageCitations = self.getPackageCitations().values()
        if packageCitations:
            citations.append('*Package references:*')
            citations += packageCitations
        if not citations:
            return ['No references provided']
        return citations
2343
+
2344
    @classmethod
    def getHelpText(cls):
        """Get help text to show in the protocol help button"""
        helpText = cls.getDoc()
        # NOt used since getPlugin is always None
        # plugin = self.getPlugin()
        # if plugin:
        #     pluginMetadata = plugin.metadata
        #     helpText += "\n\nPlugin info:\n"
        #     for key, value in pluginMetadata.iteritems():
        #         helpText += "%s: \t%s\n" % (key, value)
        return helpText
2356
+
2357
    def _methods(self):
        """ Should be implemented in subclasses. See methods. """
        return ["No methods information."]
2360
+
2361
    def getParsedMethods(self):
        """ Get the _methods results and parse possible cites.

        Every '[bibKey]' marker found in the methods text is replaced with
        the formatted citation link; errors are reported inline.
        """
        try:
            baseMethods = self._methods() or []
            bibtex = self.__getPluginBibTex()
            parsedMethods = []
            for m in baseMethods:
                for bibId, cite in bibtex.items():
                    k = '[%s]' % bibId
                    link = self._getCiteText(cite, useKeyLabel=True)
                    m = m.replace(k, link)
                parsedMethods.append(m)
        except Exception as ex:
            parsedMethods = ['ERROR generating methods info: %s' % ex]

        return parsedMethods
2377
+
2378
    def methods(self):
        """ Return a description about methods about current protocol
        execution. """
        # TODO: Maybe store the methods and not computing all times??
        return self.getParsedMethods() + [''] + self.citations()
2383
+
2384
    def runProtocol(self, protocol):
        """ Setup another protocol to be run from a workflow.

        Gives the child protocol its own working dir under this protocol's
        path, shares mapper, host config, run mode and executor, then runs
        it synchronously.
        """
        name = protocol.getClassName() + protocol.strId()
        # protocol.setName(name)
        protocol.setWorkingDir(self._getPath(name))
        protocol.setMapper(self.mapper)
        self.hostConfig.setStore(False)
        protocol.setHostConfig(self.getHostConfig())
        protocol.runMode.set(self._originalRunMode)
        protocol.makePathsAndClean()
        protocol.setStepsExecutor(self._stepsExecutor)
        protocol.run()
        self._store()  # TODO: check if this is needed
2397
+
2398
    def isChild(self):
        """ Return true if this protocol was invoked from a workflow
        (another protocol)"""
        return self.hasObjParentId()
2402
+
2403
    def getStepsGraph(self, refresh=True):
        """ Build a graph taking into account the dependencies between
        steps. In streaming we might find first the createOutputStep (e.g 24)
        depending on 25

        :param refresh: currently unused; kept for interface compatibility.
        :return: a pyworkflow Graph whose root is 'Protocol' and whose
            nodes carry the Step object in node.step.
        """
        from pyworkflow.utils.graph import Graph
        g = Graph(rootName='PROTOCOL')
        root = g.getRoot()
        root.label = 'Protocol'

        steps = self.loadSteps()
        # Steps are keyed by 1-based string indexes, matching the ids
        # used in step prerequisites.
        stepsDict = {str(i + 1): steps[i] for i in range(0, len(steps))}
        stepsDone = {}

        def addStep(i, step):

            # Exit if already done
            # This happens when, in streaming there is a child "before" a parent
            if i in stepsDone:
                return

            index = step.getIndex() or i
            sid = str(index)
            n = g.createNode(sid)
            n.step = step
            stepsDone[i] = n
            if step.getPrerequisites().isEmpty():
                root.addChild(n)
            else:
                for p in step.getPrerequisites():
                    # If prerequisite exists
                    if p not in stepsDone:
                        # Recurse so the parent node exists before linking.
                        addStep(p, stepsDict[p])
                    stepsDone[p].addChild(n)

        for i, s in stepsDict.items():
            addStep(i, s)
        return g
2440
+
2441
    def closeMappers(self):
        """ Close the mappers of all output Sets. """
        for _, attr in self.iterOutputAttributes(Set):
            attr.close()
2445
+
2446
    def loadMappers(self):
        """ Open mapper connections from previous closed outputs. """
        for _, attr in self.iterOutputAttributes(Set):
            attr.load()
2450
+
2451
    def allowsDelete(self, obj):
        """Return True if `obj` may be deleted from this protocol's
        outputs. Disabled by default; subclasses may override."""
        return False
2453
+
2454
    def legacyCheck(self):
        """ Hook defined to run some compatibility checks
        before display the protocol.
        """
        pass
2459
+
2460
    def getSize(self):
        """ Returns the size of the folder corresponding to this protocol"""
        # Lazily computed and cached; a stored 0/None triggers recomputation.
        if not self._size:
            self._size = getFileSize(self.getPath())

        return self._size
2466
+
2467
    def cleanExecutionAttributes(self, includeSteps=True):
        """ Clean all the executions attributes.

        Resets pid and queue job ids; when includeSteps is True the
        steps-done counter is reset too.
        """
        self.setPid(0)
        self._jobId.clear()
        if includeSteps:
            self._stepsDone.set(0)
2473
+
2474
class LegacyProtocol(Protocol):
    """ Special subclass of Protocol to be used when a protocol class
    is not found. It means that have been removed or it is in another
    development branch. In such, we will use the LegacyProtocol to
    simply store the parameters and inputs/outputs."""

    def __str__(self):
        # The object label is the most meaningful identification available
        # when the real class is unknown.
        return self.getObjLabel()

    # overload getClassDomain because legacy protocols
    # do not have a package associated to it
    @classmethod
    def getClassDomain(cls):
        return pw.Config.getDomain()
2488
+
2489
+
2490
+ # ---------- Helper functions related to Protocols --------------------
2491
+
2492
def runProtocolMain(projectPath, protDbPath, protId):
    """
    Main entry point when a protocol will be executed.
    This function should be called when::

        scipion runprotocol ...

    :param projectPath: the absolute path to the project directory.
    :param protDbPath: path to protocol db relative to projectPath
    :param protId: id of the protocol object in db.

    Loads the protocol, aborts when the class could not be resolved
    (LegacyProtocol), chooses the appropriate steps executor
    (thread/queue/plain) and runs the protocol.
    """

    # Enter to the project directory and load protocol from db
    protocol = getProtocolFromDb(projectPath, protDbPath, protId, chdir=True)

    setDefaultLoggingContext(protId, protocol.getProject().getShortName())

    # A LegacyProtocol here means the class is not importable in this
    # environment (typically a cluster node with a different installation).
    if isinstance(protocol,LegacyProtocol):
        logger.error(f"There is a problem loading the protocol {protId} ({protocol}) at {pwutils.getHostName()} "
                     f"Installations of the execution differs from the visualization installation. "
                     f"This is probably because you are running this protocol in a cluster node which installation is not "
                     f"compatible with the head node or you have a plugin available on the Main GUI process (check launching directory) but "
                     f"not properly installed as a plugin in Scipion. Please verify installation.")
        sys.exit()
    hostConfig = protocol.getHostConfig()
    gpuList = protocol.getGpuList()

    # If queue is to be used, GPU ids are anonymized (the queue system
    # assigns the real devices).
    if protocol.useQueue():
        gpuList = anonimizeGPUs(gpuList)

    # Create the steps executor
    executor = None
    nThreads = max(protocol.numberOfThreads.get(), 1)

    # Parallel protocols reserve one thread for the master loop,
    # so the executor gets nThreads - 1 workers.
    if protocol.modeParallel() and nThreads > 1:
        if protocol.useQueueForSteps():
            executor = QueueStepExecutor(hostConfig,
                                         protocol.getSubmitDict(),
                                         nThreads - 1,
                                         gpuList=gpuList)
        else:
            executor = ThreadStepExecutor(hostConfig, nThreads - 1,
                                          gpuList=gpuList)

    if executor is None and protocol.useQueueForSteps():
        executor = QueueStepExecutor(hostConfig, protocol.getSubmitDict(), 1,
                                     gpuList=gpuList)

    if executor is None:
        executor = StepExecutor(hostConfig,
                                gpuList=gpuList)

    logger.info("Running protocol using the %s executor." % executor)
    protocol.setStepsExecutor(executor)
    # Finally run the protocol
    protocol.run()
2550
+
2551
+
2552
def anonimizeGPUs(gpuList):
    """ Replace real GPU ids by consecutive anonymous indices (0, 1, ...)
    in order of first appearance; repeated ids map to the same index.

    :param gpuList: iterable of GPU ids (repetitions allowed).
    :return: list of anonymous indices, same length as gpuList.
    """
    indexByGpu = {}
    # setdefault evaluates len(indexByGpu) before inserting, so a new
    # GPU gets the next free index while a known one keeps its index.
    return [indexByGpu.setdefault(gpu, len(indexByGpu)) for gpu in gpuList]
def getProtocolFromDb(projectPath, protDbPath, protId, chdir=False):
    """ Retrieve the Protocol object from a given .sqlite file
    and the protocol id.

    :param projectPath: absolute path to the project directory.
    :param protDbPath: protocol database path, relative to projectPath.
    :param protId: id of the protocol object in the database.
    :param chdir: if True, change the working directory to the project path.
    :return: the loaded Protocol instance (None if protId is not found).
    :raises Exception: if the project path or the database file do not exist.
    """

    if not os.path.exists(projectPath):
        raise Exception("ERROR: project path '%s' does not exist. "
                        % projectPath)

    fullDbPath = os.path.join(projectPath, protDbPath)

    if not os.path.exists(fullDbPath):
        raise Exception("ERROR: protocol database '%s' does not exist. "
                        % fullDbPath)

    # We need this import here because from Project is imported
    # all from protocol indirectly, so if move this to the top
    # we get an import error
    from pyworkflow.project import Project
    project = Project(pw.Config.getDomain(), projectPath)
    # Fix: reuse fullDbPath instead of re-joining the same components again.
    project.load(dbPath=fullDbPath, chdir=chdir,
                 loadAllConfig=False)
    protocol = project.getProtocol(protId)
    return protocol
def getUpdatedProtocol(protocol):
    """ Reload *protocol* from its run database and return the fresh copy,
    closing the DB connections opened while loading. """
    updated = getProtocolFromDb(protocol.getProject().path,
                                protocol.getDbPath(),
                                protocol.getObjId())
    # Release the mapper connections before handing the copy back.
    updated.getProject().closeMapper()
    updated.closeMappers()
    return updated
def isProtocolUpToDate(protocol):
    """ Compare the protocol's lastUpdateTimeStamp against the modification
    time of its runs database.

    :param protocol: protocol to check; None is considered up to date.
    :return: True if the stored timestamp is at least as recent as the
        database file; False when it is older or cannot be determined.
    """
    if protocol is None:
        return True

    # No stored timestamp at all -> cannot be up to date.
    if protocol.lastUpdateTimeStamp.get(None) is None:
        return False

    protTS = protocol.lastUpdateTimeStamp.datetime()

    if protTS is None:
        return False

    dbTS = pwutils.getFileLastModificationDate(protocol.getDbPath())

    if not (protTS and dbTS):
        logger.info("Can't compare if protocol is up to date: "
                    "Protocol %s, protocol time stamp: %s, %s timeStamp: %s"
                    % (protocol, protTS, protocol, dbTS))
        # Fix: previously this branch fell through and returned None
        # implicitly; make the "unknown" case an explicit False so the
        # function always returns a bool (same truthiness as before).
        return False

    return protTS >= dbTS
class ProtImportBase(Protocol):
    """ Base class for import protocols. It defines no behavior of its
    own; it exists so import protocols share a common ancestor. """
class ProtStreamingBase(Protocol):
    """ Base class for streaming protocols.

    Subclasses must implement stepsGeneratorStep (see its docstring),
    which inserts processing steps as input becomes available; outputs
    should be created at the end of those processing steps. To avoid
    concurrency errors, create outputs inside a ``with self._lock:``
    block. The protocol runs in parallel mode and its validation
    requires more than one thread (one for the steps generator plus at
    least one for the actual processing).
    """

    stepsExecutionMode = STEPS_PARALLEL

    def _defineStreamingParams(self, form):
        """ Add a "Streaming" section with the streamingSleepOnWait param.

        Protocols that support stream processing may call this from
        their _defineParams. Some streaming protocols are quite fast,
        so checking input/output updates creates an IO overhead;
        streamingSleepOnWait lets them sleep (without consuming
        resources) while waiting for new work.
        """
        form.addSection("Streaming")
        form.addParam("streamingSleepOnWait", IntParam, default=10,
                      label="Sleep when waiting (secs)",
                      help="If you specify a value greater than zero, "
                           "it will be the number of seconds that the "
                           "protocol will sleep when waiting for new "
                           "input data in streaming mode. ")

    def _insertAllSteps(self):
        """ Insert only the step that generates the remaining steps. """
        self._insertFunctionStep(self.resumableStepGeneratorStep,
                                 str(datetime.now()), needsGPU=False)

    def resumableStepGeneratorStep(self, ts):
        """ Wrapper that makes the generator step resumable: ts is a
        timestamp, so this step always differs from previous executions. """
        self.stepsGeneratorStep()

    def _stepsCheck(self):
        """ Persist any steps created since the last check. """
        if self._newSteps:
            self.updateSteps()

    def stepsGeneratorStep(self):
        """
        To be implemented by any streaming protocol: check the input
        and, once ready-conditions are met, call
        self._insertFunctionStep.

        :return: None
        """
        return None

    def _getStreamingSleepOnWait(self):
        """ Return the configured streaming sleep in seconds (0 if unset). """
        return self.getAttributeValue('streamingSleepOnWait', 0)

    def _streamingSleepOnWait(self):
        """ Sleep the configured amount when there is no more work to do. """
        waitSecs = self._getStreamingSleepOnWait()
        if waitSecs <= 0:
            return
        self.info("Waiting %s now before checking again for new input" % waitSecs)
        time.sleep(waitSecs)

    def _validateThreads(self, messages: list):
        """ Append an error to *messages* when fewer than 2 threads are set. """
        if self.numberOfThreads.get() < 2:
            messages.append("At least 2 threads are needed for running this protocol. "
                            "1 for the 'stepsGenerator step' and one more for the actual processing")

    def _validate(self):
        """ Default validation: just the thread-count check. Subclasses
        overriding this should still call _validateThreads. """
        problems = []
        self._validateThreads(problems)
        return problems