scipion-pyworkflow 3.10.5__py3-none-any.whl → 3.10.6__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package, as published to one of the supported public registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in the registry.
pyworkflow/constants.py CHANGED
@@ -43,7 +43,7 @@ VERSION_1 = '1.0.0'
43
43
  VERSION_1_1 = '1.1.0'
44
44
  VERSION_1_2 = '1.2.0'
45
45
  VERSION_2_0 = '2.0.0'
46
- VERSION_3_0 = '3.10.5'
46
+ VERSION_3_0 = '3.10.6'
47
47
 
48
48
  # For a new release, define a new constant and assign it to LAST_VERSION
49
49
  # The existing one has to be added to OLD_VERSIONS list.
@@ -73,6 +73,16 @@ SCIPION_HOME_VAR = 'SCIPION_HOME'
73
73
  SCIPION_TESTS = 'SCIPION_TESTS'
74
74
  SCIPION_SCRATCH = 'SCIPION_SCRATCH'
75
75
 
76
+ # VARIABLE names in host.conf
77
+ PARALLEL_COMMAND_VAR = 'PARALLEL_COMMAND'
78
+ PLUGIN_MODULE_VAR = 'PLUGIN_MODULE'
79
+ QUEUE_FOR_JOBS = 'QUEUE_FOR_JOBS'
80
+
81
+
82
+ # Launching constants
83
+ RUN_JOB_GPU_PARAM = 'GPU' # Param name to "place" GPU ids used to build a run command.
84
+ RUN_JOB_GPU_PARAM_SEARCH = "%("+ RUN_JOB_GPU_PARAM +")s"
85
+
76
86
  # FONT
77
87
  SCIPION_DEFAULT_FONT_SIZE = 10
78
88
 
@@ -215,4 +225,3 @@ DEFAULT_EXECUTION_ACTION_ALL = 3
215
225
  # Id field/attribute constants
216
226
  ID_COLUMN='id'
217
227
  ID_ATTRIBUTE='_objId'
218
-
pyworkflow/gui/form.py CHANGED
@@ -1743,7 +1743,7 @@ class FormWindow(Window):
1743
1743
  entry = self._createBoundEntry(procFrame, pwutils.Message.VAR_MPI)
1744
1744
  entry.grid(row=r2, column=c2 + 1, padx=(0, 5), sticky='w')
1745
1745
 
1746
- helpMessage += pwutils.Message.HELP_PARALLEL_MPI
1746
+ helpMessage += '\n' + pwutils.Message.HELP_PARALLEL_MPI
1747
1747
 
1748
1748
 
1749
1749
  btnHelp = IconButton(procFrame, pwutils.Message.TITLE_COMMENT,
pyworkflow/gui/project/searchprotocol.py CHANGED
@@ -139,7 +139,7 @@ class SearchProtocolWindow(SearchBaseWindow):
139
139
  def _addProtocolToTree(self, protList):
140
140
  """ Adds the items in protList to the tree
141
141
 
142
- :param protList: List of tuples with all the values/colunms used in search ans shown in the tree"""
142
+ :param protList: List of tuples with all the values/columns used in the search search to show in the tree"""
143
143
 
144
144
  for key, label, installed, help, streamified, beta, new, updated, weight in protList:
145
145
  tag = ProtocolTreeConfig.getProtocolTag(installed == 'installed',
pyworkflow/gui/project/viewprotocols_extra.py CHANGED
@@ -167,7 +167,7 @@ class RunIOTreeProvider(pwgui.tree.TreeProvider):
167
167
  that is more readable for the user to pick the desired object.
168
168
  """
169
169
  label = 'None'
170
- if obj:
170
+ if obj is not None:
171
171
  label = obj.getObjLabel()
172
172
  if not len(label.strip()):
173
173
  parentLabel = parent.getObjLabel() if parent else 'None'
@@ -241,7 +241,6 @@ class RunIOTreeProvider(pwgui.tree.TreeProvider):
241
241
 
242
242
  return infoPtr
243
243
 
244
-
245
244
  if obj is None or not obj.hasValue():
246
245
  return None
247
246
 
pyworkflow/mapper/sqlite.py CHANGED
@@ -1027,13 +1027,16 @@ class SqliteFlatMapper(Mapper):
1027
1027
 
1028
1028
  return obj
1029
1029
 
1030
- def __iterObjectsFromRows(self, objRows, objectFilter=None):
1030
+ def __iterObjectsFromRows(self, objRows, objectFilter=None, rowFilter=None):
1031
1031
  for objRow in objRows:
1032
+ if rowFilter and not rowFilter(objRow):
1033
+ continue
1034
+
1032
1035
  obj = self.__objFromRow(objRow)
1033
1036
  if objectFilter is None or objectFilter(obj):
1034
1037
  yield obj
1035
1038
 
1036
- def __objectsFromRows(self, objRows, iterate=False, objectFilter=None):
1039
+ def __objectsFromRows(self, objRows, iterate=False, objectFilter=None, rowFilter=None):
1037
1040
  """Create a set of object from a set of rows
1038
1041
  Params:
1039
1042
  objRows: rows result from a db select.
@@ -1043,9 +1046,9 @@ class SqliteFlatMapper(Mapper):
1043
1046
  """
1044
1047
  if not iterate:
1045
1048
  return [obj.clone()
1046
- for obj in self.__iterObjectsFromRows(objRows, objectFilter)]
1049
+ for obj in self.__iterObjectsFromRows(objRows, objectFilter, rowFilter)]
1047
1050
  else:
1048
- return self.__iterObjectsFromRows(objRows, objectFilter)
1051
+ return self.__iterObjectsFromRows(objRows, objectFilter, rowFilter)
1049
1052
 
1050
1053
  def selectBy(self, iterate=False, objectFilter=None, **args):
1051
1054
  """Select object meetings some criteria"""
@@ -1053,7 +1056,7 @@ class SqliteFlatMapper(Mapper):
1053
1056
  return self.__objectsFromRows(objRows, iterate, objectFilter)
1054
1057
 
1055
1058
  def selectAll(self, iterate=True, objectFilter=None, orderBy=ID,
1056
- direction='ASC', where='1', limit=None):
1059
+ direction='ASC', where='1', limit=None, rowFilter=None):
1057
1060
  # Just a sanity check for emtpy sets, that doesn't contains
1058
1061
  # 'Properties' table
1059
1062
  if not self.db.hasTable('Properties'):
@@ -1076,7 +1079,7 @@ and restarting scipion. Export command:
1076
1079
  export SQLITE_TMPDIR=. """ % str(e)
1077
1080
  raise OperationalError(msg)
1078
1081
 
1079
- return self.__objectsFromRows(objRows, iterate, objectFilter)
1082
+ return self.__objectsFromRows(objRows, iterate, objectFilter, rowFilter)
1080
1083
 
1081
1084
  def unique(self, labels, where=None):
1082
1085
  """ Returns a list (for a single label) or a dictionary with unique values for the passed labels.
pyworkflow/object.py CHANGED
@@ -1233,12 +1233,13 @@ class Set(Object):
1233
1233
  return self._getMapper().exists(itemId)
1234
1234
 
1235
1235
  def iterItems(self, orderBy='id', direction='ASC', where=None,
1236
- limit=None, iterate=True):
1236
+ limit=None, iterate=True, rowFilter=None):
1237
1237
  return self._getMapper().selectAll(orderBy=orderBy,
1238
1238
  direction=direction,
1239
1239
  where=where,
1240
1240
  limit=limit,
1241
- iterate=iterate) # has flat mapper, iterate is true
1241
+ iterate=iterate,
1242
+ rowFilter=rowFilter) # has flat mapper, iterate is true
1242
1243
 
1243
1244
  def getFirstItem(self):
1244
1245
  """ Return the first item in the Set. """
pyworkflow/plugin.py CHANGED
@@ -508,7 +508,7 @@ class Domain:
508
508
 
509
509
  raiseMsg = "%s\n %s\n%s\n" % (msgStr, calling, hint)
510
510
  if doRaise:
511
- raise Exception("\n\n" + raiseMsg)
511
+ raise ImportError("\n\n" + raiseMsg)
512
512
  else:
513
513
  logger.info(raiseMsg)
514
514
 
pyworkflow/project/project.py CHANGED
@@ -48,7 +48,7 @@ from pyworkflow.mapper import SqliteMapper
48
48
  from pyworkflow.protocol.constants import (MODE_RESTART, MODE_RESUME,
49
49
  STATUS_INTERACTIVE, ACTIVE_STATUS,
50
50
  UNKNOWN_JOBID, INITIAL_SLEEP_TIME, STATUS_FINISHED)
51
- from pyworkflow.protocol.protocol import ProtImportBase, Protocol
51
+ from pyworkflow.protocol.protocol import Protocol
52
52
 
53
53
  from . import config
54
54
 
@@ -306,14 +306,17 @@ class Project(object):
306
306
  if creationTime: # CreationTime was found in project.sqlite
307
307
  ctStr = creationTime[0] # This is our String type instance
308
308
 
309
- # We store it in mem as dateime
309
+ # We store it in mem as datetime
310
310
  self._creationTime = ctStr
311
311
 
312
312
  else:
313
- # We should read the creation time from settings.sqlite and
314
- # update the CreationTime in the project.sqlite
315
- self._creationTime = pwobj.String(self.getSettingsCreationTime())
316
- self._storeCreationTime()
313
+
314
+ # If connected to project.sqlite and not any or the run.db
315
+ if self.path.endswith(PROJECT_DBNAME):
316
+ # We should read the creation time from settings.sqlite and
317
+ # update the CreationTime in the project.sqlite
318
+ self._creationTime = pwobj.String(self.getSettingsCreationTime())
319
+ self._storeCreationTime()
317
320
 
318
321
  # ---- Helper functions to load different pieces of a project
319
322
  def _loadDb(self, dbPath):
@@ -1961,37 +1964,31 @@ class Project(object):
1961
1964
  self.settings.setReadOnly(value)
1962
1965
 
1963
1966
  def fixLinks(self, searchDir):
1964
- logger.info("Fixing project links. Searching at %s" % searchDir)
1967
+ logger.info(f"Fixing links for project {self.getShortName()}. Searching in: {searchDir}")
1965
1968
  runs = self.getRuns()
1966
1969
 
1970
+ counter = 0
1967
1971
  for prot in runs:
1968
- print (prot)
1969
- broken = False
1970
- if isinstance(prot, ProtImportBase) or prot.getClassName() == "ProtImportMovies":
1971
- logger.info("Import detected")
1972
- for _, attr in prot.iterOutputAttributes():
1973
- for f in attr.getFiles():
1974
- if ':' in f:
1975
- f = f.split(':')[0]
1976
-
1977
- if not os.path.exists(f):
1978
- if not broken:
1979
- broken = True
1980
- logger.info("Found broken links in run: %s" %
1981
- pwutils.magenta(prot.getRunName()))
1982
- logger.info(" Missing: %s" % pwutils.magenta(f))
1983
-
1984
- if os.path.islink(f):
1985
- sourceFile = os.path.realpath(f)
1986
- logger.info(" -> %s" % pwutils.red(sourceFile))
1987
-
1988
- newFile = pwutils.findFile(os.path.basename(sourceFile),
1989
- searchDir,
1990
- recursive=True)
1991
- if newFile:
1992
- logger.info(" Found file %s, creating link... %s" % (newFile,
1993
- pwutils.green(" %s -> %s" % (f, newFile))))
1994
- pwutils.createAbsLink(newFile, f)
1972
+ if prot.getClassName().startswith("ProtImport"):
1973
+ runName = prot.getRunName()
1974
+ logger.info(f"Found protocol {runName}")
1975
+ for f in prot.getOutputFiles():
1976
+ if ':' in f:
1977
+ f = f.split(':')[0]
1978
+
1979
+ if not os.path.exists(f):
1980
+ logger.info(f"\tMissing link: {f}")
1981
+
1982
+ if os.path.islink(f):
1983
+ sourceFile = os.path.realpath(f)
1984
+ newFile = pwutils.findFileRecursive(os.path.basename(sourceFile),
1985
+ searchDir)
1986
+ if newFile:
1987
+ counter += 1
1988
+ logger.info(f"\t\tCreating link: {f} -> {newFile}")
1989
+ pwutils.createAbsLink(newFile, f)
1990
+
1991
+ logger.info(f"Fixed {counter} broken links")
1995
1992
 
1996
1993
  @staticmethod
1997
1994
  def cleanProjectName(projectName):
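
Note: fixLinks now walks every run whose class name starts with "ProtImport", checks the files reported by getOutputFiles(), and relinks broken symlinks it can find under searchDir via pwutils.findFileRecursive and pwutils.createAbsLink. A minimal programmatic sketch, assuming an existing project and the Manager API used by the pyworkflow scripts:

    from pyworkflow.project import Manager

    project = Manager().loadProject('MyProject')   # hypothetical project name
    project.fixLinks('/data/relocated')            # hypothetical directory holding the moved data
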
pyworkflow/project/scripts/fix_links.py CHANGED
@@ -3,12 +3,15 @@
3
3
  import sys
4
4
  import os
5
5
  import logging
6
- logging.basicConfig(level="INFO")
7
6
 
7
+ from pyworkflow.config import Config
8
8
  from pyworkflow.project import Manager
9
9
  import pyworkflow.utils as pwutils
10
10
 
11
11
 
12
+ logging.basicConfig(level=Config.SCIPION_LOG_LEVEL, format=Config.SCIPION_LOG_FORMAT)
13
+
14
+
12
15
  def usage(error):
13
16
  print("""
14
17
  ERROR: %s
pyworkflow/protocol/constants.py CHANGED
@@ -84,4 +84,4 @@ UNKNOWN_JOBID = -1
84
84
  SIZE_1KB = 1024
85
85
  SIZE_1MB = SIZE_1KB * SIZE_1KB
86
86
  SIZE_1GB = SIZE_1MB * SIZE_1KB
87
- SIZE_1TB = SIZE_1GB * SIZE_1KB
87
+ SIZE_1TB = SIZE_1GB * SIZE_1KB
pyworkflow/protocol/executor.py CHANGED
@@ -39,6 +39,7 @@ import os
39
39
 
40
40
  import pyworkflow.utils.process as process
41
41
  from pyworkflow.utils.path import getParentFolder, removeExt
42
+ from pyworkflow.constants import PLUGIN_MODULE_VAR, RUN_JOB_GPU_PARAM_SEARCH
42
43
  from . import constants as cts
43
44
 
44
45
  from .launch import _submit, UNKNOWN_JOBID, _checkJobStatus
@@ -59,16 +60,26 @@ class StepExecutor:
59
60
  """ Set protocol to append active jobs to its jobIds. """
60
61
  self.protocol = protocol
61
62
 
62
- def runJob(self, log, programName, params,
63
+ def getRunContext(self):
64
+ return {PLUGIN_MODULE_VAR: self.protocol.getPlugin().getName()}
65
+
66
+ def runJob(self, log, programName, params,
63
67
  numberOfMpi=1, numberOfThreads=1,
64
68
  env=None, cwd=None, executable=None):
65
69
  """ This function is a wrapper around runJob,
66
70
  providing the host configuration.
67
71
  """
68
72
  process.runJob(log, programName, params,
69
- numberOfMpi, numberOfThreads,
73
+ numberOfMpi, numberOfThreads,
70
74
  self.hostConfig,
71
- env=env, cwd=cwd, gpuList=self.getGpuList(), executable=executable)
75
+ env=env, cwd=cwd, gpuList=self._getGPUListForCommand(programName, params), executable=executable, context=self.protocol.getSubmitDict())
76
+
77
+ def _getGPUListForCommand(self, program, params):
78
+ """ Returns the list of GPUs if the program or the params have the GPU placeholder %(GPU)s """
79
+ if RUN_JOB_GPU_PARAM_SEARCH in params or RUN_JOB_GPU_PARAM_SEARCH in program:
80
+ return self.getGpuList()
81
+ else:
82
+ return []
72
83
 
73
84
  def _getRunnable(self, steps, n=1):
74
85
  """ Return the n steps that are 'new' and all its
@@ -258,7 +269,7 @@ class ThreadStepExecutor(StepExecutor):
258
269
 
259
270
  gpus = self.getFreeGpuSlot(nodeId)
260
271
  if gpus is None:
261
- logger.warning("Step on node %s is requesting GPUs but there isn't any available. Review configuration of threads/GPUs. Returning and empty list." % nodeId)
272
+ logger.warning("Step on node %s is requesting GPUs but there isn't any available. Review configuration of threads/GPUs. Returning an empty list." % nodeId)
262
273
  return []
263
274
  else:
264
275
  return gpus
@@ -422,15 +433,20 @@ class QueueStepExecutor(ThreadStepExecutor):
422
433
  threadId = threading.current_thread().thId
423
434
  submitDict = dict(self.hostConfig.getQueuesDefault())
424
435
  submitDict.update(self.submitDict)
425
- submitDict['JOB_COMMAND'] = process.buildRunCommand(programName, params, numberOfMpi,
426
- self.hostConfig, env,
427
- gpuList=self.getGpuList())
428
436
  threadJobId = self.getThreadJobId(threadId)
429
437
  subthreadId = '-%s-%s' % (threadId, threadJobId)
430
438
  submitDict['JOB_NAME'] = submitDict['JOB_NAME'] + subthreadId
431
439
  submitDict['JOB_SCRIPT'] = os.path.abspath(removeExt(submitDict['JOB_SCRIPT']) + subthreadId + ".job")
432
440
  submitDict['JOB_LOGS'] = os.path.join(getParentFolder(submitDict['JOB_SCRIPT']), submitDict['JOB_NAME'])
433
441
 
442
+ logger.debug("Variables available for replacement in submission command are: %s" % submitDict)
443
+
444
+ submitDict['JOB_COMMAND'] = process.buildRunCommand(programName, params, numberOfMpi,
445
+ self.hostConfig, env,
446
+ gpuList=self._getGPUListForCommand(programName, params),
447
+ context=submitDict)
448
+
449
+
434
450
  jobid = _submit(self.hostConfig, submitDict, cwd, env)
435
451
  self.protocol.appendJobId(jobid) # append active jobs
436
452
  self.protocol._store(self.protocol._jobId)
pyworkflow/protocol/hosts.py CHANGED
@@ -36,6 +36,7 @@ from configparser import RawConfigParser
36
36
  from collections import OrderedDict
37
37
 
38
38
  import pyworkflow as pw
39
+ from pyworkflow import PARALLEL_COMMAND_VAR
39
40
  from pyworkflow.object import Object, String, Integer
40
41
 
41
42
 
@@ -199,7 +200,7 @@ class HostConfig(Object):
199
200
  # Read the address of the remote hosts,
200
201
  # using 'localhost' as default for backward compatibility
201
202
  host.setAddress(get('ADDRESS', 'localhost'))
202
- host.mpiCommand.set(get('PARALLEL_COMMAND'))
203
+ host.mpiCommand.set(get(PARALLEL_COMMAND_VAR))
203
204
  host.queueSystem = QueueSystemConfig()
204
205
  hostQueue = host.queueSystem # shortcut
205
206
  hostQueue.name.set(get('NAME'))
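
Note: PARALLEL_COMMAND is the host.conf variable holding the MPI launcher template; with this release the %-substitution context passed to it is the whole submit dictionary rather than just JOB_NODES and COMMAND (see the executor.py hunks above and the process.py hunks below). An illustrative template and its expansion; the mpirun flags are an assumption, not necessarily the packaged default:

    # host.conf (illustrative):
    #   PARALLEL_COMMAND = mpirun -np %(JOB_NODES)d %(COMMAND)s
    template = "mpirun -np %(JOB_NODES)d %(COMMAND)s"
    context = {'JOB_NODES': 4, 'COMMAND': "`which my_program` --in data.mrc"}
    print(template % context)   # mpirun -np 4 `which my_program` --in data.mrc
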
pyworkflow/protocol/protocol.py CHANGED
@@ -36,13 +36,15 @@ from pyworkflow.exceptions import ValidationException, PyworkflowException
36
36
  from pyworkflow.object import *
37
37
  import pyworkflow.utils as pwutils
38
38
  from pyworkflow.utils.log import getExtraLogInfo, STATUS, setDefaultLoggingContext
39
+ from pyworkflow.constants import PLUGIN_MODULE_VAR, QUEUE_FOR_JOBS
39
40
  from .executor import StepExecutor, ThreadStepExecutor, QueueStepExecutor
40
41
  from .constants import *
41
- from .params import Form
42
+ from .params import Form, IntParam
42
43
  from ..utils import getFileSize
43
44
 
44
45
 
45
46
  import logging
47
+
46
48
  # Get the root logger
47
49
  logger = logging.getLogger(__name__)
48
50
 
@@ -67,6 +69,7 @@ class Step(Object):
67
69
 
68
70
  def needsGPU(self) -> bool:
69
71
  return self._needsGPU.get()
72
+
70
73
  def getIndex(self):
71
74
  return self._index
72
75
 
@@ -1117,10 +1120,7 @@ class Protocol(Step):
1117
1120
  def _getRelPathExecutionDir(self, *path):
1118
1121
  """ Return a relative path from the projdir. """
1119
1122
  # TODO must be a bettis
1120
- return os.path.relpath(
1121
- self._getPath(*path),
1122
- os.path.dirname(os.path.dirname(self.workingDir.get()))
1123
- )
1123
+ return os.path.relpath(self._getPath(*path), os.path.dirname(os.path.dirname(self.workingDir.get())))
1124
1124
 
1125
1125
  def _getBasePath(self, path):
1126
1126
  """ Take the basename of the path and get the path
@@ -1617,7 +1617,6 @@ class Protocol(Step):
1617
1617
  prot_id=self.getObjId(),
1618
1618
  prot_name=self.getClassName()))
1619
1619
 
1620
-
1621
1620
  def getLogPaths(self):
1622
1621
  return [self.getStdoutLog(),self.getStderrLog() , self.getScheduleLog()]
1623
1622
 
@@ -1638,7 +1637,6 @@ class Protocol(Step):
1638
1637
  """ Return the steps.sqlite file under logs directory. """
1639
1638
  return self._getLogsPath('steps.sqlite')
1640
1639
 
1641
-
1642
1640
  def _addChunk(self, txt, fmt=None):
1643
1641
  """
1644
1642
  Add text txt to self._buffer, with format fmt.
@@ -1921,21 +1919,26 @@ class Protocol(Step):
1921
1919
  queueName, queueParams = self.getQueueParams()
1922
1920
  hc = self.getHostConfig()
1923
1921
 
1924
- script = self._getLogsPath(hc.getSubmitPrefix() + self.strId() + '.job')
1925
- d = {'JOB_SCRIPT': script,
1926
- 'JOB_LOGS': self._getLogsPath(hc.getSubmitPrefix() + self.strId()),
1927
- 'JOB_NODEFILE': os.path.abspath(script.replace('.job', '.nodefile')),
1928
- 'JOB_NAME': self.strId(),
1922
+ d = {'JOB_NAME': self.strId(),
1929
1923
  'JOB_QUEUE': queueName,
1930
1924
  'JOB_NODES': self.numberOfMpi.get(),
1931
1925
  'JOB_THREADS': self.numberOfThreads.get(),
1932
1926
  'JOB_CORES': self.numberOfMpi.get() * self.numberOfThreads.get(),
1933
1927
  'JOB_HOURS': 72,
1934
1928
  'GPU_COUNT': len(self.getGpuList()),
1935
- 'QUEUE_FOR_JOBS': 'N',
1929
+ QUEUE_FOR_JOBS: 'N',
1936
1930
  'SCIPION_PROJECT': "SCIPION_PROJECT", # self.getProject().getShortName(),
1937
- 'SCIPION_PROTOCOL': self.getRunName()
1931
+ 'SCIPION_PROTOCOL': self.getRunName(),
1932
+ PLUGIN_MODULE_VAR: self.getPlugin().getName()
1938
1933
  }
1934
+
1935
+ # Criteria in HostConfig.load to load or not QUEUE variables
1936
+ if hc.getQueueSystem().hasName():
1937
+ job_logs = self._getLogsPath(hc.getSubmitPrefix() + self.strId())
1938
+ d['JOB_SCRIPT'] = job_logs + '.job'
1939
+ d['JOB_LOGS'] = job_logs
1940
+ d['JOB_NODEFILE'] = os.path.abspath(job_logs +'.nodefile')
1941
+
1939
1942
  d.update(queueParams)
1940
1943
  return d
1941
1944
 
@@ -1946,12 +1949,12 @@ class Protocol(Step):
1946
1949
  def useQueueForSteps(self):
1947
1950
  """ This function will return True if the protocol has been set
1948
1951
  to be launched through a queue by steps """
1949
- return self.useQueue() and (self.getSubmitDict()["QUEUE_FOR_JOBS"] == "Y")
1952
+ return self.useQueue() and (self.getSubmitDict()[QUEUE_FOR_JOBS] == "Y")
1950
1953
 
1951
1954
  def useQueueForProtocol(self):
1952
1955
  """ This function will return True if the protocol has been set
1953
1956
  to be launched through a queue """
1954
- return self.useQueue() and (self.getSubmitDict()["QUEUE_FOR_JOBS"] == "N")
1957
+ return self.useQueue() and (self.getSubmitDict()[QUEUE_FOR_JOBS] != "Y")
1955
1958
 
1956
1959
  def getQueueParams(self):
1957
1960
  if self._queueParams.hasValue():
@@ -2441,6 +2444,11 @@ def runProtocolMain(projectPath, protDbPath, protId):
2441
2444
  setDefaultLoggingContext(protId, protocol.getProject().getShortName())
2442
2445
 
2443
2446
  hostConfig = protocol.getHostConfig()
2447
+ gpuList = protocol.getGpuList()
2448
+
2449
+ #If queue is to be used
2450
+ if protocol.useQueue():
2451
+ gpuList = anonimizeGPUs(gpuList)
2444
2452
 
2445
2453
  # Create the steps executor
2446
2454
  executor = None
@@ -2451,18 +2459,18 @@ def runProtocolMain(projectPath, protDbPath, protId):
2451
2459
  executor = QueueStepExecutor(hostConfig,
2452
2460
  protocol.getSubmitDict(),
2453
2461
  nThreads - 1,
2454
- gpuList=protocol.getGpuList())
2462
+ gpuList=gpuList)
2455
2463
  else:
2456
2464
  executor = ThreadStepExecutor(hostConfig, nThreads - 1,
2457
- gpuList=protocol.getGpuList())
2465
+ gpuList=gpuList)
2458
2466
 
2459
2467
  if executor is None and protocol.useQueueForSteps():
2460
2468
  executor = QueueStepExecutor(hostConfig, protocol.getSubmitDict(), 1,
2461
- gpuList=protocol.getGpuList())
2469
+ gpuList=gpuList)
2462
2470
 
2463
2471
  if executor is None:
2464
2472
  executor = StepExecutor(hostConfig,
2465
- gpuList=protocol.getGpuList())
2473
+ gpuList=gpuList)
2466
2474
 
2467
2475
  logger.info("Running protocol using the %s executor." % executor)
2468
2476
  protocol.setStepsExecutor(executor)
@@ -2470,6 +2478,22 @@ def runProtocolMain(projectPath, protDbPath, protId):
2470
2478
  protocol.run()
2471
2479
 
2472
2480
 
2481
+ def anonimizeGPUs(gpuList):
2482
+
2483
+ renamedGPUs=dict()
2484
+ anonimousGPUs = []
2485
+
2486
+ for gpu in gpuList:
2487
+
2488
+ if gpu not in renamedGPUs:
2489
+ renamedGPUs[gpu] = len(renamedGPUs)
2490
+
2491
+ anonimousGPUs.append(renamedGPUs[gpu])
2492
+
2493
+ return anonimousGPUs
2494
+
2495
+
2496
+
2473
2497
  def getProtocolFromDb(projectPath, protDbPath, protId, chdir=False):
2474
2498
  """ Retrieve the Protocol object from a given .sqlite file
2475
2499
  and the protocol id.
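
Note: anonimizeGPUs remaps the user-configured GPU ids onto consecutive indices starting at 0 while preserving repetitions; it is only applied when the protocol goes through a queue (presumably because the physical ids assigned by the scheduler will not match the ones typed in the form; that rationale is an inference, not stated in the code). A standalone illustration mirroring the assertions of test_gpu_anonimization further down:

    def remap(gpuList):
        # Same mapping anonimizeGPUs produces: first distinct id -> 0, second -> 1, ...
        seen = {}
        return [seen.setdefault(gpu, len(seen)) for gpu in gpuList]

    print(remap([2, 1, 2, 4]))   # [0, 1, 0, 2]
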
@@ -2535,6 +2559,7 @@ def isProtocolUpToDate(protocol):
2535
2559
  class ProtImportBase(Protocol):
2536
2560
  """ Base Import protocol"""
2537
2561
 
2562
+
2538
2563
  class ProtStreamingBase(Protocol):
2539
2564
  """ Base protocol to implement streaming protocols.
2540
2565
  stepsGeneratorStep should be implemented (see its description) and output
@@ -2544,18 +2569,35 @@ class ProtStreamingBase(Protocol):
2544
2569
  """
2545
2570
 
2546
2571
  stepsExecutionMode = STEPS_PARALLEL
2572
+
2573
+ def _defineStreamingParams(self, form):
2574
+ """ This function can be called during the _defineParams method
2575
+ of some protocols that support stream processing.
2576
+ It will add a Streaming section together with the following
2577
+ params:
2578
+ streamingSleepOnWait: Some streaming protocols are quite fast,
2579
+ so, checking input/output updates creates an IO overhead.
2580
+ This params allows them to sleep (without consuming resources)
2581
+ to wait for new work to be done.
2582
+ """
2583
+ form.addSection("Streaming")
2584
+ form.addParam("streamingSleepOnWait", IntParam, default=10,
2585
+ label="Sleep when waiting (secs)",
2586
+ help="If you specify a value greater than zero, "
2587
+ "it will be the number of seconds that the "
2588
+ "protocol will sleep when waiting for new "
2589
+ "input data in streaming mode. ")
2590
+
2547
2591
  def _insertAllSteps(self):
2548
- # Insert the step that generates the steps
2592
+ """ Insert the step that generates the steps """
2549
2593
  self._insertFunctionStep(self.resumableStepGeneratorStep, str(datetime.now()), needsGPU=False)
2550
2594
 
2551
2595
  def resumableStepGeneratorStep(self, ts):
2552
- """ This allow to resume protocols. ts is the time stamp so this stap is alway different form previous exceution"""
2596
+ """ This allow to resume protocols. ts is the time stamp so this stap is always different form previous execution"""
2553
2597
  self.stepsGeneratorStep()
2554
2598
 
2555
-
2556
2599
  def _stepsCheck(self):
2557
-
2558
- # Just store steps created in checkNewInputStep
2600
+ """ Just store steps created in checkNewInputStep"""
2559
2601
  if self._newSteps:
2560
2602
  self.updateSteps()
2561
2603
 
@@ -2569,11 +2611,28 @@ class ProtStreamingBase(Protocol):
2569
2611
  """
2570
2612
  pass
2571
2613
 
2572
- def _validateThreads(self, messages:list):
2614
+ def _getStreamingSleepOnWait(self):
2615
+ """ Retrieves the configured sleep duration for waiting during streaming.
2616
+ Returns:
2617
+ - int: The sleep duration in seconds during streaming wait.
2618
+ """
2619
+ return self.getAttributeValue('streamingSleepOnWait', 0)
2620
+
2621
+ def _streamingSleepOnWait(self):
2622
+ """ This method should be used by protocols that want to sleep
2623
+ when there is not more work to do.
2624
+ """
2625
+ sleepOnWait = self._getStreamingSleepOnWait()
2626
+ if sleepOnWait > 0:
2627
+ self.info("Waiting %s now before checking again for new input" % sleepOnWait)
2628
+ time.sleep(sleepOnWait)
2629
+
2630
+ def _validateThreads(self, messages: list):
2573
2631
 
2574
2632
  if self.numberOfThreads.get() < 2:
2575
2633
  messages.append("At least 2 threads are needed for running this protocol. "
2576
2634
  "1 for the 'stepsGenerator step' and one more for the actual processing" )
2635
+
2577
2636
  def _validate(self):
2578
2637
  """ If you want to implement a validate method do it but call _validateThreads or validate threads value."""
2579
2638
  errors = []
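
Note: protocols deriving from ProtStreamingBase can now call _defineStreamingParams from their _defineParams to expose the "Streaming" section, and call _streamingSleepOnWait() from stepsGeneratorStep when there is nothing new to process. A minimal sketch; the subclass, its parameters and its stop condition are hypothetical, only the two new hooks are real API:

    from pyworkflow.protocol.protocol import ProtStreamingBase

    class ProtStreamingExample(ProtStreamingBase):
        """ Hypothetical streaming protocol, not part of the package. """

        def _defineParams(self, form):
            form.addSection('Input')
            # ... protocol-specific parameters would be declared here ...
            self._defineStreamingParams(form)  # adds the Streaming section and streamingSleepOnWait

        def stepsGeneratorStep(self):
            finished = False
            while not finished:
                newFiles = []  # placeholder: discover newly arrived input here
                if not newFiles:
                    # Nothing to do yet: honour the user-configured sleep instead of busy-polling.
                    self._streamingSleepOnWait()
                finished = True  # placeholder stop condition so the sketch terminates
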
pyworkflow/utils/process.py CHANGED
@@ -37,15 +37,16 @@ import psutil
37
37
 
38
38
  from .utils import greenStr
39
39
  from pyworkflow import Config
40
+ from pyworkflow.constants import PLUGIN_MODULE_VAR, PARALLEL_COMMAND_VAR, RUN_JOB_GPU_PARAM
40
41
 
41
42
 
42
43
  # The job should be launched from the working directory!
43
44
  def runJob(log, programname, params,
44
45
  numberOfMpi=1, numberOfThreads=1,
45
- hostConfig=None, env=None, cwd=None, gpuList=None, executable=None):
46
+ hostConfig=None, env=None, cwd=None, gpuList=None, executable=None, context=dict()):
46
47
 
47
48
  command = buildRunCommand(programname, params, numberOfMpi, hostConfig,
48
- env, gpuList=gpuList)
49
+ env, gpuList=gpuList,context=context)
49
50
 
50
51
  if log is None:
51
52
  log = logger
@@ -74,8 +75,10 @@ def runCommand(command, env=None, cwd=None, executable=None):
74
75
 
75
76
 
76
77
  def buildRunCommand(programname, params, numberOfMpi, hostConfig=None,
77
- env=None, gpuList=None):
78
- """ Return a string with the command line to run """
78
+ env=None, gpuList=None, context=dict()):
79
+ """ Return a string with the command line to run
80
+
81
+ :param context: a dictionary with extra context variable to make run command more flexible"""
79
82
 
80
83
  # Convert our list of params to a string, with each element escaped
81
84
  # with "" in case there are spaces.
@@ -83,10 +86,10 @@ def buildRunCommand(programname, params, numberOfMpi, hostConfig=None,
83
86
  params = ' '.join('"%s"' % p for p in params)
84
87
 
85
88
  if gpuList:
86
- params = params % {'GPU': ' '.join(str(g) for g in gpuList)}
89
+ params = params % {RUN_JOB_GPU_PARAM: ' '.join(str(g) for g in gpuList)}
87
90
  if "CUDA_VISIBLE_DEVICES" in programname:
88
91
  sep = "," if len(gpuList) > 1 else ""
89
- programname = programname % {'GPU': sep.join(str(g) for g in gpuList)}
92
+ programname = programname % {RUN_JOB_GPU_PARAM: sep.join(str(g) for g in gpuList)}
90
93
 
91
94
  prepend = '' if env is None else env.getPrepend()
92
95
 
@@ -100,10 +103,28 @@ def buildRunCommand(programname, params, numberOfMpi, hostConfig=None,
100
103
 
101
104
  mpiFlags = '' if env is None else env.get('SCIPION_MPI_FLAGS', '')
102
105
 
103
- mpiCmd = hostConfig.mpiCommand.get() % {
106
+ context.update({
104
107
  'JOB_NODES': numberOfMpi,
105
108
  'COMMAND': "%s `which %s` %s" % (mpiFlags, programname, params),
106
- }
109
+ })
110
+ logger.debug("Context variables for mpi command are: %s" % context)
111
+
112
+ mpiCommand = hostConfig.mpiCommand.get()
113
+ pluginModule = context.get(PLUGIN_MODULE_VAR, None)
114
+
115
+ if pluginModule is not None:
116
+ custom_command_var = PARALLEL_COMMAND_VAR + "_" + pluginModule.upper()
117
+ if custom_command_var in env:
118
+ mpiCommand = env.get(custom_command_var)
119
+ logger.info("Custom mpi command for this plugin found. Using it. %s: %s"
120
+ % (custom_command_var, mpiCommand))
121
+ else:
122
+ logger.info("%s not found in the environment. Using default mpi command found in %s. %s: %s"
123
+ % (custom_command_var, Config.SCIPION_HOSTS, PARALLEL_COMMAND_VAR, mpiCommand))
124
+
125
+
126
+ mpiCmd = mpiCommand % context
127
+
107
128
  return '%s %s' % (prepend, mpiCmd)
108
129
 
109
130
 
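
Note: buildRunCommand now checks for a plugin-specific launcher before falling back to the host.conf PARALLEL_COMMAND: it composes the variable name PARALLEL_COMMAND_<plugin module upper-cased> and uses it if the run environment defines it. A standalone illustration of the lookup; the plugin name "relion" and both templates are assumptions:

    PARALLEL_COMMAND_VAR = 'PARALLEL_COMMAND'
    mpiCommand = 'mpirun -np %(JOB_NODES)d %(COMMAND)s'                      # would come from host.conf
    env = {'PARALLEL_COMMAND_RELION': 'srun -n %(JOB_NODES)d %(COMMAND)s'}   # hypothetical per-plugin override
    pluginModule = 'relion'                                                  # hypothetical plugin module name

    custom_command_var = PARALLEL_COMMAND_VAR + "_" + pluginModule.upper()
    if custom_command_var in env:
        mpiCommand = env[custom_command_var]
    print(mpiCommand % {'JOB_NODES': 2, 'COMMAND': 'my_program --in data.mrc'})
    # srun -n 2 my_program --in data.mrc
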
pyworkflowtests/objects.py CHANGED
@@ -640,10 +640,10 @@ class MockSetOfImages(MockSet):
640
640
  """ Return the string representing the dimensions. """
641
641
  return str(self._firstDim)
642
642
 
643
- def iterItems(self, orderBy='id', direction='ASC', where=None, limit=None):
643
+ def iterItems(self, orderBy='id', direction='ASC', where=None, limit=None, rowFilter=None):
644
644
  """ Redefine iteration to set the acquisition to images. """
645
645
  for img in pwobj.Set.iterItems(self, orderBy=orderBy, direction=direction,
646
- where=where, limit=limit):
646
+ where=where, limit=limit, rowFilter=rowFilter):
647
647
 
648
648
  # Sometimes the images items in the set could
649
649
  # have the acquisition info per data row and we
pyworkflowtests/tests/test_protocol_execution.py CHANGED
@@ -73,6 +73,13 @@ class TestProtocolExecution(pwtests.BaseTest):
73
73
 
74
74
  self.assertEqual(prot.endTime.get(), prot2.endTime.get())
75
75
 
76
+ def test_gpu_anonimization(self):
77
+
78
+ self.assertEqual(pwprot.anonimizeGPUs([0, 1, 2]),[0, 1, 2], "Anonimization of GPUs does not work")
79
+ self.assertEqual(pwprot.anonimizeGPUs([2, 1, 0]), [0, 1, 2], "Anonimization of GPUs does not work")
80
+ self.assertEqual(pwprot.anonimizeGPUs([2, 1, 2]), [0, 1, 0], "Anonimization of GPUs does not work")
81
+ self.assertEqual(pwprot.anonimizeGPUs([2, 1, 2, 4]), [0, 1, 0, 2], "Anonimization of GPUs does not work")
82
+
76
83
  def test_gpuSlots(self):
77
84
  """ Test gpu slots are properly composed in combination of threads"""
78
85
 
scipion_pyworkflow-3.10.5.dist-info/METADATA → scipion_pyworkflow-3.10.6.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
1
- Metadata-Version: 2.1
1
+ Metadata-Version: 2.4
2
2
  Name: scipion-pyworkflow
3
- Version: 3.10.5
3
+ Version: 3.10.6
4
4
  Summary: Simple workflow platform used in scientific applications, initially developed within the Scipion framework for image processing in Electron Microscopy.
5
5
  Home-page: https://github.com/scipion-em/scipion-pyworkflow
6
6
  Author: J.M. De la Rosa Trevin, Roberto Marabini, Grigory Sharov, Josue Gomez Blanco, Pablo Conesa, Yunior Fonseca Reyna
@@ -20,16 +20,26 @@ Classifier: Topic :: Scientific/Engineering
20
20
  License-File: LICENSE.txt
21
21
  Requires-Dist: bibtexparser<=1.4.1
22
22
  Requires-Dist: psutil<=5.9.6
23
- Requires-Dist: tkcolorpicker
24
- Requires-Dist: distro<=1.8
25
- Requires-Dist: importlib-metadata<=6.8.0
26
- Requires-Dist: matplotlib==3.7.3; python_version == "3.8"
27
- Requires-Dist: numpy==1.24.4; python_version == "3.8"
28
23
  Requires-Dist: configparser==6.0.0; python_version >= "3.8"
29
- Requires-Dist: pillow==10.1.0; python_version >= "3.8"
30
- Requires-Dist: requests==2.31.0; python_version >= "3.8"
24
+ Requires-Dist: matplotlib==3.7.3; python_version == "3.8"
31
25
  Requires-Dist: matplotlib==3.8.1; python_version >= "3.9"
26
+ Requires-Dist: pillow==10.1.0; python_version >= "3.8"
27
+ Requires-Dist: numpy==1.24.4; python_version == "3.8"
32
28
  Requires-Dist: numpy==1.26.1; python_version >= "3.9"
29
+ Requires-Dist: requests==2.31.0; python_version >= "3.8"
30
+ Requires-Dist: tkcolorpicker
31
+ Requires-Dist: distro<=1.8
32
+ Requires-Dist: importlib-metadata<=6.8.0
33
+ Dynamic: author
34
+ Dynamic: author-email
35
+ Dynamic: classifier
36
+ Dynamic: description
37
+ Dynamic: home-page
38
+ Dynamic: keywords
39
+ Dynamic: license-file
40
+ Dynamic: project-url
41
+ Dynamic: requires-dist
42
+ Dynamic: summary
33
43
 
34
44
  .. image:: https://img.shields.io/pypi/v/scipion-pyworkflow.svg
35
45
  :target: https://pypi.python.org/pypi/scipion-pyworkflow
scipion_pyworkflow-3.10.5.dist-info/RECORD → scipion_pyworkflow-3.10.6.dist-info/RECORD CHANGED
@@ -1,9 +1,9 @@
1
1
  pyworkflow/__init__.py,sha256=Wr-MVKMQJy_Cy-rpPPB0-pyv8-8tx7GPPaLSNBrV1AI,1246
2
2
  pyworkflow/config.py,sha256=_sq6YCRfcfE4WUNXLShgbICbBeTSLpD2KmPVapR1Wos,22306
3
- pyworkflow/constants.py,sha256=-3fJkkmdAhvEm5sXIXLEJI6VpE8Uhzhj2X3Sau732Y4,7448
3
+ pyworkflow/constants.py,sha256=XJiTVSmA0jR8yYgxW-tMaWCPonXgn6P5KXI0__W8mtc,7758
4
4
  pyworkflow/exceptions.py,sha256=3VFxuNJHcIWxRnLPR0vYg0RFAQMmxPBJZLZSi87VI8E,507
5
- pyworkflow/object.py,sha256=yX3tBUuBssCI8aWGlTmyEKzFE59dJq_7NqllouVzMk0,55069
6
- pyworkflow/plugin.py,sha256=JJm5plPyOnPAR4To6I5rXVIBda-1Dg-53zicdnLSrac,28858
5
+ pyworkflow/object.py,sha256=01nUr0Ud-EDVCULtElfC4patgxUYuUIbrI5qmCsC7BY,55149
6
+ pyworkflow/plugin.py,sha256=UAwrx8xm0TkVx16q-jYqvCkYva021DdSWUTav4utJJo,28860
7
7
  pyworkflow/template.py,sha256=uScWMZCR3U6ishDlx7QGDDr8B_aLpKXoh8zOhqAirCY,10463
8
8
  pyworkflow/viewer.py,sha256=vA6VxYuxmCwMjIxCIdrp_G-R-nVo-0TA8p1rSl24EOY,11386
9
9
  pyworkflow/wizard.py,sha256=nXuk_qMUVlQNa6nB6GCt0CoFQ_P2dnJbGWdwtpDG2tQ,2633
@@ -22,7 +22,7 @@ pyworkflow/gui/__init__.py,sha256=D7PVYky81yTcqvkGEtw1CIxRkICpsn4TO5z5lJg84yM,13
22
22
  pyworkflow/gui/browser.py,sha256=Be4keeCLl32t5HzrqOdMhk_6oJ93Q5WX5FKy8iO9hYU,25590
23
23
  pyworkflow/gui/canvas.py,sha256=M_pD7QbqnYq9MY145WN6BYzeIAo0cIkbxLrZKNE80yg,42106
24
24
  pyworkflow/gui/dialog.py,sha256=35YpyOmUsmiVqdjU-k4AFDG7PKq0te4AUR-oTofhxEk,35329
25
- pyworkflow/gui/form.py,sha256=ho_HeqB4e4lPaM1WclVYzfYYMpnwZm_xYCRrlXDQf0E,104000
25
+ pyworkflow/gui/form.py,sha256=_rqKWNx2tzyU5wg9mup3COo0edrGuzZ-sq29p-JZg0w,104007
26
26
  pyworkflow/gui/graph.py,sha256=HSZ5PfFKkhv3tzl0o9UtSU_FfDevKR2M3L9HgU9_Gkc,8747
27
27
  pyworkflow/gui/graph_layout.py,sha256=R9uTc_Ei4oWAeCkZ3d78JjC1QKV7HPJaDLyhvSfK7NE,9520
28
28
  pyworkflow/gui/gui.py,sha256=pEtBYo0bf5grDX8e6Ju920aSDGiWno85pxrSxcMrSTY,20627
@@ -37,7 +37,7 @@ pyworkflow/gui/project/base.py,sha256=N64UXy5ep7RFweyBWTDro9UESgKRRAIvlRuotWIaO_
37
37
  pyworkflow/gui/project/constants.py,sha256=rORDaNCdAFRFcBmjRt2PJ1cXpSmYgDLfbvrbqZh1Bb0,4943
38
38
  pyworkflow/gui/project/labels.py,sha256=7m4heNcN3zXe0BHuFJyQrPD5hC8LiHnlu1sizRc8l_Y,7680
39
39
  pyworkflow/gui/project/project.py,sha256=LgpwqnOIr6RPqBb76fOoWrPrpdBzQCQev7JuRLj4lEA,18852
40
- pyworkflow/gui/project/searchprotocol.py,sha256=xv1nDk7tigQ4M7SLc_ZqGY8c6kG1crjP_xCp9KJjVZ0,5753
40
+ pyworkflow/gui/project/searchprotocol.py,sha256=5f3suNir_UNn2CIppmNoKHR0JZIfyuH7XRbEBaGVPsI,5762
41
41
  pyworkflow/gui/project/searchrun.py,sha256=xCGxs7L9PMk48ei9Gi3HI2PAOYEM-zXR5vRfAQRLHKI,7269
42
42
  pyworkflow/gui/project/steps.py,sha256=fq0WhPBoByFs8Lr-qmGZ-aFC4lx3XCF2zM2DOBd_KAc,6053
43
43
  pyworkflow/gui/project/utils.py,sha256=H9oFPzz9lAjAI4PRYSYyYniTBn98Y6YSs0bE0qXpMEM,11923
@@ -45,20 +45,20 @@ pyworkflow/gui/project/variables.py,sha256=UczW6yQqzssj3eETEKaae5GSpsWr04ItPrr5o
45
45
  pyworkflow/gui/project/viewdata.py,sha256=Xoxy1YTqKGSU2DUiM7j23gf-yv4ToNmX_zWgRpVZDo4,18187
46
46
  pyworkflow/gui/project/viewprojects.py,sha256=ZcWzfFLVeCKzVsFboxquzBsOagEoPW0CpVUQ8ZLpvQE,22516
47
47
  pyworkflow/gui/project/viewprotocols.py,sha256=sykimCQUHCUG5Irjr8__sIZJgJEzG-wgZb7SrdpzkVE,84014
48
- pyworkflow/gui/project/viewprotocols_extra.py,sha256=d3YZ3KAU8wMdn8psOvDxUDxcN0prruRNdb31bMa8_M0,21250
48
+ pyworkflow/gui/project/viewprotocols_extra.py,sha256=fp7TKQtndw-b0c7GEbTox_w270eKNJnrFybgTCPOg1Q,21261
49
49
  pyworkflow/mapper/__init__.py,sha256=HM7tMMd1r2BZ8hbPGryUQ80E4evY46zIVjiZ3edg_Mg,1186
50
50
  pyworkflow/mapper/mapper.py,sha256=YGVlBK3btHL-jLID6v2jLy7Mb9Wu47lxfZjHdzz0hMg,7726
51
- pyworkflow/mapper/sqlite.py,sha256=II53wccWx7kRWLFEKyHKTyX9yr5Qz5-d_IxmSdgjdVc,65269
51
+ pyworkflow/mapper/sqlite.py,sha256=ESOkr_whle59ECg-icgm8nAin9VMkgocwZSGUYxOgEY,65427
52
52
  pyworkflow/mapper/sqlite_db.py,sha256=HYSXe_fRX1NBKDs6l2zefdO4PMEajnoDMXPjmoFs8Kc,5602
53
53
  pyworkflow/project/__init__.py,sha256=05l9tvr3FfBNL3XNZSbFCs6yO-1HbFlmFu204w7yyKk,1369
54
54
  pyworkflow/project/config.py,sha256=F9H1vLJJ9vPyT6zeSqHX69o-wbaN8hWueOuDn53evjc,13692
55
55
  pyworkflow/project/manager.py,sha256=bk5hFDuIJqEgELU0plFG-RycGhhFfcJxmPp_qhFj1mE,6758
56
- pyworkflow/project/project.py,sha256=oBJYSzZyZesXpauMjRo2PyNCr9mYly5zWB8pb-jTSiw,83882
56
+ pyworkflow/project/project.py,sha256=7o2XNmfgrXWRMkmyE2OfOkOB4CpQ4yPoipGz7BdW4I8,83532
57
57
  pyworkflow/project/scripts/clean_projects.py,sha256=5qsLHIyJk7sZJUrfSp8sbMRYTkbZ2purtc-5JJb8awM,2600
58
58
  pyworkflow/project/scripts/config.py,sha256=VF4NMsykWzQjCQHRwnfFwxFq2PSk57Ni3TpgTh3nF3w,2933
59
59
  pyworkflow/project/scripts/create.py,sha256=cmyYJoKP4J9goPRRtQRM9jrsfp-DARHS0XeKJ0ObSCE,2781
60
60
  pyworkflow/project/scripts/edit_workflow.py,sha256=AhwwEFjEgTRR8LUZ4tBSoY2jprDgUmmgFR3FcIU5uno,2940
61
- pyworkflow/project/scripts/fix_links.py,sha256=i6hRbczKk3A6GpRtBSp357nT4p7fdbW84enaPx0ueAI,958
61
+ pyworkflow/project/scripts/fix_links.py,sha256=u6UXXqIIh-Rq8FYPmmtvcrgPdszPt93LnkVv0xowAEY,1049
62
62
  pyworkflow/project/scripts/load.py,sha256=oA_xZjO94N-hZohLZQXBKEdmE7BZuWxH7x9gPx9lMj8,2696
63
63
  pyworkflow/project/scripts/refresh.py,sha256=-uw41ouFgEzIF4iBXFRzAI79Lna7fqMmEKhRciSUpTA,2603
64
64
  pyworkflow/project/scripts/schedule.py,sha256=mUUlaUSiMvA_skES6WOL0Mg-j7-S9Cx6dN-6wx5ZM6Y,3989
@@ -66,13 +66,13 @@ pyworkflow/project/scripts/stack2volume.py,sha256=ZV8qtPj4qWg2LJSXHBnx-S8L8pyGGQ
66
66
  pyworkflow/project/scripts/stop.py,sha256=vCeCxkwPCoUkLbna5HCxKWJ1hrsI4U19Sg9JD4ksXj8,2427
67
67
  pyworkflow/protocol/__init__.py,sha256=bAdIpvUW4GAYdIuv92DZ44-OEkZ7lTtnp1S9T5cwtVs,1413
68
68
  pyworkflow/protocol/bibtex.py,sha256=mCUk1Hp5Vp_i2lozDM1BQNOw10e_RSu86oXvrR63sOA,2122
69
- pyworkflow/protocol/constants.py,sha256=DfuCs7eub-mLHJjEpHlIG9BW3fUpRwfTVwMYytNWv6U,3392
70
- pyworkflow/protocol/executor.py,sha256=qWm5Baf8snMzS0DyvjeyPIBPqkYgfiRqDOUt_8kqTYg,17960
71
- pyworkflow/protocol/hosts.py,sha256=B9ENNclqYe75CPqAMOoPjwn-r3ST6HxTewXtsK_zWks,10453
69
+ pyworkflow/protocol/constants.py,sha256=gyYtGFjfZbyAi2nDK6YpIRq6baZIJc_EdPD3oP2VdNM,3391
70
+ pyworkflow/protocol/executor.py,sha256=3f1lZ0Y3ahv7BOkQJLsghJG7vtx30p2BhlttLE7XHx8,18728
71
+ pyworkflow/protocol/hosts.py,sha256=fk2RpLL0E6wPphSTr0zl1enq-MHtTAKLQaWq_6cbcZw,10499
72
72
  pyworkflow/protocol/launch.py,sha256=7WKAiHma2tSuhqK4xVnxD_SnVt7Y5qyDFdQwTo8BLF0,11267
73
73
  pyworkflow/protocol/package.py,sha256=L6x3HHKtbrhDQRJHD07SG3DQKNMGaRQ0ROoLEY3SuRQ,1444
74
74
  pyworkflow/protocol/params.py,sha256=gP6QImgULvzCr-f0iseArIp1bjXm1JuUr9padmuMs7M,25901
75
- pyworkflow/protocol/protocol.py,sha256=YX8C02zfhLCTJPIz8NgZozXPQiKn5IqEXoyyZAHzPLI,97552
75
+ pyworkflow/protocol/protocol.py,sha256=48x-NleLfKxrzYIz13rZ1EsWa74vlomNZdd54mEIvuw,99724
76
76
  pyworkflow/resources/Imagej.png,sha256=nU2nWI1wxZB_xlOKsZzdUjj-qiCTjO6GwEKYgZ5Risg,14480
77
77
  pyworkflow/resources/chimera.png,sha256=AKCuwMqmZo0Cg2sddMUjBWUhmAq-nPsAVCBpVrYNeiQ,815
78
78
  pyworkflow/resources/fa-exclamation-triangle_alert.png,sha256=31_XvRu0CkJ2dvHSpcBAR43378lIJTWwiag_A7SuUQc,585
@@ -103,7 +103,7 @@ pyworkflow/utils/echo.py,sha256=ZXJRrmxUaTT4Xxf7_pQwg7Th341iFafTs66VEKNOZmE,3442
103
103
  pyworkflow/utils/graph.py,sha256=z3Hcj0I38du97DQEqNT5gk--SCDTRPlKotaCszoZfX8,4981
104
104
  pyworkflow/utils/log.py,sha256=8SIg1jwOKMQzGgDqutB7ZD42ZzLHslxULPvK-f1sDD0,10693
105
105
  pyworkflow/utils/path.py,sha256=hDisc13HhfB6CxpBcI1JBd5er_S6yVTKw1MFSw1AR3U,16803
106
- pyworkflow/utils/process.py,sha256=m6gZ_9vHTJyzXOCGGIIwFCbXXoH7XC-Okn5SJzs1P1U,4717
106
+ pyworkflow/utils/process.py,sha256=6l4Mf6C3nyYMeLBQxbrGaAWWmJvf6zQuTLQzPYQVxPU,5790
107
107
  pyworkflow/utils/profiler.py,sha256=BC0KkAgfYqf-CV40zLcRxo5Td79f5jw1gzvaDH8iqt8,2218
108
108
  pyworkflow/utils/progressbar.py,sha256=VntEF_FTdQHjMKawfR1R4IoNgYNTEMmnLUIDvUXurxk,5903
109
109
  pyworkflow/utils/properties.py,sha256=D1LHkVn0cbxCHhedJE4XAw5ZwOufb_4z48IeJ0L0fWc,23960
@@ -117,7 +117,7 @@ pyworkflow/webservices/repository.py,sha256=Hw2ZhvuJzKbN5ivTuN2gTNeJT49Q3-PuM9Bd
117
117
  pyworkflow/webservices/workflowhub.py,sha256=hA4RETMXmxUF-l605INS1TCT2nWnUwOIjrYKzRUZVLQ,2179
118
118
  pyworkflowtests/__init__.py,sha256=RoXNgyShL7moVEXaimTDdfY1fU26dgGKtdjO4JfBQOk,1686
119
119
  pyworkflowtests/bibtex.py,sha256=1f9PjkRO937PB2b-ax-eKLwjU4YY11M5px3dk3xWQzw,2102
120
- pyworkflowtests/objects.py,sha256=f4lRHxKg6A23tAhF-8qHXslDPaHBdy4qk72TfR5g_dg,26651
120
+ pyworkflowtests/objects.py,sha256=uaD9THeybiCkUDbb_cpqEwqCpVG3RLyMsIjOKOX7oNQ,26688
121
121
  pyworkflowtests/protocols.py,sha256=gSCtA5IuG0a_rvAjZ8uZQIxOr6gmrjfvaaqFnpGY2ds,5529
122
122
  pyworkflowtests/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
123
123
  pyworkflowtests/tests/test_canvas.py,sha256=_d3Xccp7BuwdbkNC5cTTNVXH425V4k1hKYuhUsn4JtU,2685
@@ -126,15 +126,15 @@ pyworkflowtests/tests/test_logs.py,sha256=lNHPtvPIWk_nRy3L3B2GriroMwgNmTZ_-RHRw0
126
126
  pyworkflowtests/tests/test_mappers.py,sha256=2DBzZaM8rIKQTSU9xWAsNGwZkpUPB1FBwgwMmNNbVkg,15423
127
127
  pyworkflowtests/tests/test_object.py,sha256=WVWP11oXNeOTUDJ5BLFR32MmQi6C5Y6KjfVRBf9fu3w,18577
128
128
  pyworkflowtests/tests/test_project.py,sha256=RBrhpIs45dWLrciHqzpj4ORyLZCNvjm8fytIolOZ9Ko,1685
129
- pyworkflowtests/tests/test_protocol_execution.py,sha256=uOokU-bgbd-1GqnEKNPcr02vJsAJvjHQ7I3vXNH9jCw,5081
129
+ pyworkflowtests/tests/test_protocol_execution.py,sha256=mawHqMjzxZH-sbHapKR2sk5WRlMrBs5UQXIbqGSiVPM,5558
130
130
  pyworkflowtests/tests/test_protocol_export.py,sha256=z18nKPkOnrYLMU8KqcnVsF6-ylQ8d9mw-qFJWRn4Qdw,3291
131
131
  pyworkflowtests/tests/test_protocol_output.py,sha256=8gnIFMRNmwPnIBRCG29WHJB6mqK4FLGn1jiXHtTD6pY,5980
132
132
  pyworkflowtests/tests/test_streaming.py,sha256=vOH-bKCM-fVUSsejqNnCX5TPXhdUayk9ZtJHsNVcfCY,1615
133
133
  pyworkflowtests/tests/test_utils.py,sha256=_pTYGCuXC7YNMdCBzUYNfSBCR3etrHsxHfIhsQi4VPc,7465
134
- scipion_pyworkflow-3.10.5.dist-info/LICENSE.txt,sha256=ixuiBLtpoK3iv89l7ylKkg9rs2GzF9ukPH7ynZYzK5s,35148
135
- scipion_pyworkflow-3.10.5.dist-info/METADATA,sha256=0CpQn_hwt_ND8BCgrCIJRTrQDLHDZfnJMwWmKCsiXrg,4682
136
- scipion_pyworkflow-3.10.5.dist-info/WHEEL,sha256=a7TGlA-5DaHMRrarXjVbQagU3Man_dCnGIWMJr5kRWo,91
137
- scipion_pyworkflow-3.10.5.dist-info/dependency_links.txt,sha256=D7r_CPRjYRtBb3q_OBocTdsaeXI5TwnYMu5ri0JFtzs,84
138
- scipion_pyworkflow-3.10.5.dist-info/entry_points.txt,sha256=oR-zwsOICjEPINm-FWVPp-RfnpXZanVal4_XG6BWkkQ,127
139
- scipion_pyworkflow-3.10.5.dist-info/top_level.txt,sha256=PzyJteyenJwLjAeSFP7oYrTN_U71GABQwET8oLZkh9k,27
140
- scipion_pyworkflow-3.10.5.dist-info/RECORD,,
134
+ scipion_pyworkflow-3.10.6.dist-info/licenses/LICENSE.txt,sha256=ixuiBLtpoK3iv89l7ylKkg9rs2GzF9ukPH7ynZYzK5s,35148
135
+ scipion_pyworkflow-3.10.6.dist-info/METADATA,sha256=jkbWXYBuW00lIHyLX9lCm1AfBL6-0NvxDHVeVoA1Cx8,4881
136
+ scipion_pyworkflow-3.10.6.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
137
+ scipion_pyworkflow-3.10.6.dist-info/dependency_links.txt,sha256=D7r_CPRjYRtBb3q_OBocTdsaeXI5TwnYMu5ri0JFtzs,84
138
+ scipion_pyworkflow-3.10.6.dist-info/entry_points.txt,sha256=-P6GVocWl_NS8wS7lB-bTKf-tKANbw4n7DlXXh_VrWk,54
139
+ scipion_pyworkflow-3.10.6.dist-info/top_level.txt,sha256=PzyJteyenJwLjAeSFP7oYrTN_U71GABQwET8oLZkh9k,27
140
+ scipion_pyworkflow-3.10.6.dist-info/RECORD,,
scipion_pyworkflow-3.10.5.dist-info/WHEEL → scipion_pyworkflow-3.10.6.dist-info/WHEEL CHANGED
@@ -1,5 +1,5 @@
1
1
  Wheel-Version: 1.0
2
- Generator: setuptools (75.4.0)
2
+ Generator: setuptools (78.1.0)
3
3
  Root-Is-Purelib: true
4
4
  Tag: py3-none-any
5
5
 
scipion_pyworkflow-3.10.6.dist-info/entry_points.txt ADDED
@@ -0,0 +1,2 @@
1
+ [pyworkflow.plugin]
2
+ pyworkflowtests = pyworkflowtests
scipion_pyworkflow-3.10.5.dist-info/entry_points.txt DELETED
@@ -1,5 +0,0 @@
1
- [console_scripts]
2
- fix_links = pyworkflow.project.scripts.fix_links:main
3
-
4
- [pyworkflow.plugin]
5
- pyworkflowtests = pyworkflowtests