wmglobalqueue 2.4.5.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- Utils/CPMetrics.py +270 -0
- Utils/CertTools.py +100 -0
- Utils/EmailAlert.py +50 -0
- Utils/ExtendedUnitTestCase.py +62 -0
- Utils/FileTools.py +182 -0
- Utils/IteratorTools.py +80 -0
- Utils/MathUtils.py +31 -0
- Utils/MemoryCache.py +119 -0
- Utils/Patterns.py +24 -0
- Utils/Pipeline.py +137 -0
- Utils/PortForward.py +97 -0
- Utils/ProcFS.py +112 -0
- Utils/ProcessStats.py +194 -0
- Utils/PythonVersion.py +17 -0
- Utils/Signals.py +36 -0
- Utils/TemporaryEnvironment.py +27 -0
- Utils/Throttled.py +227 -0
- Utils/Timers.py +130 -0
- Utils/Timestamps.py +86 -0
- Utils/TokenManager.py +143 -0
- Utils/Tracing.py +60 -0
- Utils/TwPrint.py +98 -0
- Utils/Utilities.py +318 -0
- Utils/__init__.py +11 -0
- Utils/wmcoreDTools.py +707 -0
- WMCore/ACDC/Collection.py +57 -0
- WMCore/ACDC/CollectionTypes.py +12 -0
- WMCore/ACDC/CouchCollection.py +67 -0
- WMCore/ACDC/CouchFileset.py +238 -0
- WMCore/ACDC/CouchService.py +73 -0
- WMCore/ACDC/DataCollectionService.py +485 -0
- WMCore/ACDC/Fileset.py +94 -0
- WMCore/ACDC/__init__.py +11 -0
- WMCore/Algorithms/Alarm.py +39 -0
- WMCore/Algorithms/MathAlgos.py +274 -0
- WMCore/Algorithms/MiscAlgos.py +67 -0
- WMCore/Algorithms/ParseXMLFile.py +115 -0
- WMCore/Algorithms/Permissions.py +27 -0
- WMCore/Algorithms/Singleton.py +58 -0
- WMCore/Algorithms/SubprocessAlgos.py +129 -0
- WMCore/Algorithms/__init__.py +7 -0
- WMCore/Cache/GenericDataCache.py +98 -0
- WMCore/Cache/WMConfigCache.py +572 -0
- WMCore/Cache/__init__.py +0 -0
- WMCore/Configuration.py +659 -0
- WMCore/DAOFactory.py +47 -0
- WMCore/DataStructs/File.py +177 -0
- WMCore/DataStructs/Fileset.py +140 -0
- WMCore/DataStructs/Job.py +182 -0
- WMCore/DataStructs/JobGroup.py +142 -0
- WMCore/DataStructs/JobPackage.py +49 -0
- WMCore/DataStructs/LumiList.py +734 -0
- WMCore/DataStructs/Mask.py +219 -0
- WMCore/DataStructs/MathStructs/ContinuousSummaryHistogram.py +197 -0
- WMCore/DataStructs/MathStructs/DiscreteSummaryHistogram.py +92 -0
- WMCore/DataStructs/MathStructs/SummaryHistogram.py +117 -0
- WMCore/DataStructs/MathStructs/__init__.py +0 -0
- WMCore/DataStructs/Pickleable.py +24 -0
- WMCore/DataStructs/Run.py +256 -0
- WMCore/DataStructs/Subscription.py +175 -0
- WMCore/DataStructs/WMObject.py +47 -0
- WMCore/DataStructs/WorkUnit.py +112 -0
- WMCore/DataStructs/Workflow.py +60 -0
- WMCore/DataStructs/__init__.py +8 -0
- WMCore/Database/CMSCouch.py +1430 -0
- WMCore/Database/ConfigDBMap.py +29 -0
- WMCore/Database/CouchMonitoring.py +450 -0
- WMCore/Database/CouchUtils.py +118 -0
- WMCore/Database/DBCore.py +198 -0
- WMCore/Database/DBCreator.py +113 -0
- WMCore/Database/DBExceptionHandler.py +59 -0
- WMCore/Database/DBFactory.py +117 -0
- WMCore/Database/DBFormatter.py +177 -0
- WMCore/Database/Dialects.py +13 -0
- WMCore/Database/ExecuteDAO.py +327 -0
- WMCore/Database/MongoDB.py +241 -0
- WMCore/Database/MySQL/Destroy.py +42 -0
- WMCore/Database/MySQL/ListUserContent.py +20 -0
- WMCore/Database/MySQL/__init__.py +9 -0
- WMCore/Database/MySQLCore.py +132 -0
- WMCore/Database/Oracle/Destroy.py +56 -0
- WMCore/Database/Oracle/ListUserContent.py +19 -0
- WMCore/Database/Oracle/__init__.py +9 -0
- WMCore/Database/ResultSet.py +44 -0
- WMCore/Database/Transaction.py +91 -0
- WMCore/Database/__init__.py +9 -0
- WMCore/Database/ipy_profile_couch.py +438 -0
- WMCore/GlobalWorkQueue/CherryPyThreads/CleanUpTask.py +29 -0
- WMCore/GlobalWorkQueue/CherryPyThreads/HeartbeatMonitor.py +105 -0
- WMCore/GlobalWorkQueue/CherryPyThreads/LocationUpdateTask.py +28 -0
- WMCore/GlobalWorkQueue/CherryPyThreads/ReqMgrInteractionTask.py +35 -0
- WMCore/GlobalWorkQueue/CherryPyThreads/__init__.py +0 -0
- WMCore/GlobalWorkQueue/__init__.py +0 -0
- WMCore/GroupUser/CouchObject.py +127 -0
- WMCore/GroupUser/Decorators.py +51 -0
- WMCore/GroupUser/Group.py +33 -0
- WMCore/GroupUser/Interface.py +73 -0
- WMCore/GroupUser/User.py +96 -0
- WMCore/GroupUser/__init__.py +11 -0
- WMCore/Lexicon.py +836 -0
- WMCore/REST/Auth.py +202 -0
- WMCore/REST/CherryPyPeriodicTask.py +166 -0
- WMCore/REST/Error.py +333 -0
- WMCore/REST/Format.py +642 -0
- WMCore/REST/HeartbeatMonitorBase.py +90 -0
- WMCore/REST/Main.py +636 -0
- WMCore/REST/Server.py +2435 -0
- WMCore/REST/Services.py +24 -0
- WMCore/REST/Test.py +120 -0
- WMCore/REST/Tools.py +38 -0
- WMCore/REST/Validation.py +250 -0
- WMCore/REST/__init__.py +1 -0
- WMCore/ReqMgr/DataStructs/RequestStatus.py +209 -0
- WMCore/ReqMgr/DataStructs/RequestType.py +13 -0
- WMCore/ReqMgr/DataStructs/__init__.py +0 -0
- WMCore/ReqMgr/__init__.py +1 -0
- WMCore/Services/AlertManager/AlertManagerAPI.py +111 -0
- WMCore/Services/AlertManager/__init__.py +0 -0
- WMCore/Services/CRIC/CRIC.py +238 -0
- WMCore/Services/CRIC/__init__.py +0 -0
- WMCore/Services/DBS/DBS3Reader.py +1044 -0
- WMCore/Services/DBS/DBSConcurrency.py +44 -0
- WMCore/Services/DBS/DBSErrors.py +112 -0
- WMCore/Services/DBS/DBSReader.py +23 -0
- WMCore/Services/DBS/DBSUtils.py +166 -0
- WMCore/Services/DBS/DBSWriterObjects.py +381 -0
- WMCore/Services/DBS/ProdException.py +133 -0
- WMCore/Services/DBS/__init__.py +8 -0
- WMCore/Services/FWJRDB/FWJRDBAPI.py +118 -0
- WMCore/Services/FWJRDB/__init__.py +0 -0
- WMCore/Services/HTTPS/HTTPSAuthHandler.py +66 -0
- WMCore/Services/HTTPS/__init__.py +0 -0
- WMCore/Services/LogDB/LogDB.py +201 -0
- WMCore/Services/LogDB/LogDBBackend.py +191 -0
- WMCore/Services/LogDB/LogDBExceptions.py +11 -0
- WMCore/Services/LogDB/LogDBReport.py +85 -0
- WMCore/Services/LogDB/__init__.py +0 -0
- WMCore/Services/MSPileup/__init__.py +0 -0
- WMCore/Services/MSUtils/MSUtils.py +54 -0
- WMCore/Services/MSUtils/__init__.py +0 -0
- WMCore/Services/McM/McM.py +173 -0
- WMCore/Services/McM/__init__.py +8 -0
- WMCore/Services/MonIT/Grafana.py +133 -0
- WMCore/Services/MonIT/__init__.py +0 -0
- WMCore/Services/PyCondor/PyCondorAPI.py +154 -0
- WMCore/Services/PyCondor/__init__.py +0 -0
- WMCore/Services/ReqMgr/ReqMgr.py +261 -0
- WMCore/Services/ReqMgr/__init__.py +0 -0
- WMCore/Services/ReqMgrAux/ReqMgrAux.py +419 -0
- WMCore/Services/ReqMgrAux/__init__.py +0 -0
- WMCore/Services/RequestDB/RequestDBReader.py +267 -0
- WMCore/Services/RequestDB/RequestDBWriter.py +39 -0
- WMCore/Services/RequestDB/__init__.py +0 -0
- WMCore/Services/Requests.py +624 -0
- WMCore/Services/Rucio/Rucio.py +1290 -0
- WMCore/Services/Rucio/RucioUtils.py +74 -0
- WMCore/Services/Rucio/__init__.py +0 -0
- WMCore/Services/RucioConMon/RucioConMon.py +121 -0
- WMCore/Services/RucioConMon/__init__.py +0 -0
- WMCore/Services/Service.py +400 -0
- WMCore/Services/StompAMQ/__init__.py +0 -0
- WMCore/Services/TagCollector/TagCollector.py +155 -0
- WMCore/Services/TagCollector/XMLUtils.py +98 -0
- WMCore/Services/TagCollector/__init__.py +0 -0
- WMCore/Services/UUIDLib.py +13 -0
- WMCore/Services/UserFileCache/UserFileCache.py +160 -0
- WMCore/Services/UserFileCache/__init__.py +8 -0
- WMCore/Services/WMAgent/WMAgent.py +63 -0
- WMCore/Services/WMAgent/__init__.py +0 -0
- WMCore/Services/WMArchive/CMSSWMetrics.py +526 -0
- WMCore/Services/WMArchive/DataMap.py +463 -0
- WMCore/Services/WMArchive/WMArchive.py +33 -0
- WMCore/Services/WMArchive/__init__.py +0 -0
- WMCore/Services/WMBS/WMBS.py +97 -0
- WMCore/Services/WMBS/__init__.py +0 -0
- WMCore/Services/WMStats/DataStruct/RequestInfoCollection.py +300 -0
- WMCore/Services/WMStats/DataStruct/__init__.py +0 -0
- WMCore/Services/WMStats/WMStatsPycurl.py +145 -0
- WMCore/Services/WMStats/WMStatsReader.py +445 -0
- WMCore/Services/WMStats/WMStatsWriter.py +273 -0
- WMCore/Services/WMStats/__init__.py +0 -0
- WMCore/Services/WMStatsServer/WMStatsServer.py +134 -0
- WMCore/Services/WMStatsServer/__init__.py +0 -0
- WMCore/Services/WorkQueue/WorkQueue.py +492 -0
- WMCore/Services/WorkQueue/__init__.py +0 -0
- WMCore/Services/__init__.py +8 -0
- WMCore/Services/pycurl_manager.py +574 -0
- WMCore/WMBase.py +50 -0
- WMCore/WMConnectionBase.py +164 -0
- WMCore/WMException.py +183 -0
- WMCore/WMExceptions.py +269 -0
- WMCore/WMFactory.py +76 -0
- WMCore/WMInit.py +377 -0
- WMCore/WMLogging.py +104 -0
- WMCore/WMSpec/ConfigSectionTree.py +442 -0
- WMCore/WMSpec/Persistency.py +135 -0
- WMCore/WMSpec/Steps/BuildMaster.py +87 -0
- WMCore/WMSpec/Steps/BuildTools.py +201 -0
- WMCore/WMSpec/Steps/Builder.py +97 -0
- WMCore/WMSpec/Steps/Diagnostic.py +89 -0
- WMCore/WMSpec/Steps/Emulator.py +62 -0
- WMCore/WMSpec/Steps/ExecuteMaster.py +208 -0
- WMCore/WMSpec/Steps/Executor.py +210 -0
- WMCore/WMSpec/Steps/StepFactory.py +213 -0
- WMCore/WMSpec/Steps/TaskEmulator.py +75 -0
- WMCore/WMSpec/Steps/Template.py +204 -0
- WMCore/WMSpec/Steps/Templates/AlcaHarvest.py +76 -0
- WMCore/WMSpec/Steps/Templates/CMSSW.py +613 -0
- WMCore/WMSpec/Steps/Templates/DQMUpload.py +59 -0
- WMCore/WMSpec/Steps/Templates/DeleteFiles.py +70 -0
- WMCore/WMSpec/Steps/Templates/LogArchive.py +84 -0
- WMCore/WMSpec/Steps/Templates/LogCollect.py +105 -0
- WMCore/WMSpec/Steps/Templates/StageOut.py +105 -0
- WMCore/WMSpec/Steps/Templates/__init__.py +10 -0
- WMCore/WMSpec/Steps/WMExecutionFailure.py +21 -0
- WMCore/WMSpec/Steps/__init__.py +8 -0
- WMCore/WMSpec/Utilities.py +63 -0
- WMCore/WMSpec/WMSpecErrors.py +12 -0
- WMCore/WMSpec/WMStep.py +347 -0
- WMCore/WMSpec/WMTask.py +1997 -0
- WMCore/WMSpec/WMWorkload.py +2288 -0
- WMCore/WMSpec/WMWorkloadTools.py +382 -0
- WMCore/WMSpec/__init__.py +9 -0
- WMCore/WorkQueue/DataLocationMapper.py +273 -0
- WMCore/WorkQueue/DataStructs/ACDCBlock.py +47 -0
- WMCore/WorkQueue/DataStructs/Block.py +48 -0
- WMCore/WorkQueue/DataStructs/CouchWorkQueueElement.py +148 -0
- WMCore/WorkQueue/DataStructs/WorkQueueElement.py +274 -0
- WMCore/WorkQueue/DataStructs/WorkQueueElementResult.py +152 -0
- WMCore/WorkQueue/DataStructs/WorkQueueElementsSummary.py +185 -0
- WMCore/WorkQueue/DataStructs/__init__.py +0 -0
- WMCore/WorkQueue/Policy/End/EndPolicyInterface.py +44 -0
- WMCore/WorkQueue/Policy/End/SingleShot.py +22 -0
- WMCore/WorkQueue/Policy/End/__init__.py +32 -0
- WMCore/WorkQueue/Policy/PolicyInterface.py +17 -0
- WMCore/WorkQueue/Policy/Start/Block.py +258 -0
- WMCore/WorkQueue/Policy/Start/Dataset.py +180 -0
- WMCore/WorkQueue/Policy/Start/MonteCarlo.py +131 -0
- WMCore/WorkQueue/Policy/Start/ResubmitBlock.py +171 -0
- WMCore/WorkQueue/Policy/Start/StartPolicyInterface.py +316 -0
- WMCore/WorkQueue/Policy/Start/__init__.py +34 -0
- WMCore/WorkQueue/Policy/__init__.py +57 -0
- WMCore/WorkQueue/WMBSHelper.py +772 -0
- WMCore/WorkQueue/WorkQueue.py +1237 -0
- WMCore/WorkQueue/WorkQueueBackend.py +750 -0
- WMCore/WorkQueue/WorkQueueBase.py +39 -0
- WMCore/WorkQueue/WorkQueueExceptions.py +44 -0
- WMCore/WorkQueue/WorkQueueReqMgrInterface.py +278 -0
- WMCore/WorkQueue/WorkQueueUtils.py +130 -0
- WMCore/WorkQueue/__init__.py +13 -0
- WMCore/Wrappers/JsonWrapper/JSONThunker.py +342 -0
- WMCore/Wrappers/JsonWrapper/__init__.py +7 -0
- WMCore/Wrappers/__init__.py +6 -0
- WMCore/__init__.py +10 -0
- wmglobalqueue-2.4.5.1.data/data/bin/wmc-dist-patch +15 -0
- wmglobalqueue-2.4.5.1.data/data/bin/wmc-dist-unpatch +8 -0
- wmglobalqueue-2.4.5.1.data/data/bin/wmc-httpd +3 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/.couchapprc +1 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/README.md +40 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/_attachments/index.html +264 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/_attachments/js/ElementInfoByWorkflow.js +96 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/_attachments/js/StuckElementInfo.js +57 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/_attachments/js/WorkloadInfoTable.js +80 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/_attachments/js/dataTable.js +70 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/_attachments/js/namespace.js +23 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/_attachments/style/main.css +75 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/couchapp.json +4 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/filters/childQueueFilter.js +13 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/filters/filterDeletedDocs.js +3 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/filters/queueFilter.js +11 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/language +1 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/lib/mustache.js +333 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/lib/validate.js +27 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/lib/workqueue_utils.js +61 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/lists/elementsDetail.js +28 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/lists/filter.js +86 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/lists/stuckElements.js +38 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/lists/workRestrictions.js +153 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/lists/workflowSummary.js +28 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/rewrites.json +73 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/shows/redirect.js +23 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/shows/status.js +40 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/templates/ElementSummaryByWorkflow.html +27 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/templates/StuckElementSummary.html +26 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/templates/TaskStatus.html +23 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/templates/WorkflowSummary.html +27 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/templates/partials/workqueue-common-lib.html +2 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/templates/partials/yui-lib-remote.html +16 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/templates/partials/yui-lib.html +18 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/updates/in-place.js +50 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/validate_doc_update.js +8 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/vendor/couchapp/_attachments/jquery.couch.app.js +235 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/vendor/couchapp/_attachments/jquery.pathbinder.js +173 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/activeData/map.js +8 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/activeData/reduce.js +2 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/activeParentData/map.js +8 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/activeParentData/reduce.js +2 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/activePileupData/map.js +8 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/activePileupData/reduce.js +2 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/analyticsData/map.js +11 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/analyticsData/reduce.js +1 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/availableByPriority/map.js +6 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/conflicts/map.js +5 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/elements/map.js +5 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/elementsByData/map.js +8 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/elementsByParent/map.js +8 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/elementsByParentData/map.js +8 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/elementsByPileupData/map.js +8 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/elementsByStatus/map.js +8 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/elementsBySubscription/map.js +6 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/elementsByWorkflow/map.js +8 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/elementsByWorkflow/reduce.js +3 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/elementsDetailByWorkflowAndStatus/map.js +26 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/jobInjectStatusByRequest/map.js +10 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/jobInjectStatusByRequest/reduce.js +1 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/jobStatusByRequest/map.js +6 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/jobStatusByRequest/reduce.js +1 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/jobsByChildQueueAndPriority/map.js +6 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/jobsByChildQueueAndPriority/reduce.js +1 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/jobsByChildQueueAndStatus/map.js +6 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/jobsByChildQueueAndStatus/reduce.js +1 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/jobsByRequest/map.js +6 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/jobsByRequest/reduce.js +1 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/jobsByStatus/map.js +6 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/jobsByStatus/reduce.js +1 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/jobsByStatusAndPriority/map.js +6 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/jobsByStatusAndPriority/reduce.js +1 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/openRequests/map.js +6 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/recent-items/map.js +5 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/siteWhitelistByRequest/map.js +6 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/siteWhitelistByRequest/reduce.js +1 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/specsByWorkflow/map.js +5 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/stuckElements/map.js +38 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/wmbsInjectStatusByRequest/map.js +12 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/wmbsInjectStatusByRequest/reduce.js +3 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/wmbsUrl/map.js +6 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/wmbsUrl/reduce.js +2 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/wmbsUrlByRequest/map.js +6 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/wmbsUrlByRequest/reduce.js +2 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/workflowSummary/map.js +9 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/workflowSummary/reduce.js +10 -0
- wmglobalqueue-2.4.5.1.dist-info/METADATA +26 -0
- wmglobalqueue-2.4.5.1.dist-info/RECORD +347 -0
- wmglobalqueue-2.4.5.1.dist-info/WHEEL +5 -0
- wmglobalqueue-2.4.5.1.dist-info/licenses/LICENSE +202 -0
- wmglobalqueue-2.4.5.1.dist-info/licenses/NOTICE +16 -0
- wmglobalqueue-2.4.5.1.dist-info/top_level.txt +2 -0
WMCore/WMSpec/WMTask.py
ADDED
|
@@ -0,0 +1,1997 @@
|
|
|
1
|
+
#!/usr/bin/env python
|
|
2
|
+
# pylint: disable=W0212
|
|
3
|
+
# W0212 (protected-access): Access to protected names of a client class.
|
|
4
|
+
"""
|
|
5
|
+
_WMTask_
|
|
6
|
+
|
|
7
|
+
Object containing a set of executable Steps which form a template for a
|
|
8
|
+
set of jobs.
|
|
9
|
+
|
|
10
|
+
Equivalent of a WorkflowSpec in the ProdSystem.
|
|
11
|
+
"""
|
|
12
|
+
import json
|
|
13
|
+
from builtins import map, zip, str as newstr, bytes
|
|
14
|
+
from future.utils import viewitems
|
|
15
|
+
|
|
16
|
+
import logging
|
|
17
|
+
import os.path
|
|
18
|
+
import time
|
|
19
|
+
|
|
20
|
+
import WMCore.WMSpec.Steps.StepFactory as StepFactory
|
|
21
|
+
import WMCore.WMSpec.Utilities as SpecUtils
|
|
22
|
+
from WMCore.Configuration import ConfigSection
|
|
23
|
+
from WMCore.DataStructs.LumiList import LumiList
|
|
24
|
+
from WMCore.DataStructs.Workflow import Workflow as DataStructsWorkflow
|
|
25
|
+
from WMCore.Lexicon import lfnBase
|
|
26
|
+
from WMCore.WMSpec.ConfigSectionTree import ConfigSectionTree, TreeHelper
|
|
27
|
+
from WMCore.WMSpec.Steps.BuildMaster import BuildMaster
|
|
28
|
+
from WMCore.WMSpec.Steps.ExecuteMaster import ExecuteMaster
|
|
29
|
+
from WMCore.WMSpec.Steps.Template import CoreHelper
|
|
30
|
+
from WMCore.WMSpec.WMStep import WMStep, WMStepHelper
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
def getTaskFromStep(stepRef):
    """
    _getTaskFromStep_

    Walk upwards from the given step reference until the enclosing
    WMTask node is found, and return it wrapped in a WMTaskHelper.

    :param stepRef: a raw step node, or a WMStepHelper wrapping one
    :raises RuntimeError: if no task node encloses the step
    """
    # Unwrap the helper, if one was passed in, to get the raw node data.
    node = stepRef.data if isinstance(stepRef, WMStepHelper) else stepRef

    taskNode = SpecUtils.findTaskAboveNode(node)
    if taskNode is None:
        # TODO: Replace with real exception class
        raise RuntimeError("Unable to find Task containing step\n")

    return WMTaskHelper(taskNode)
|
|
52
|
+
|
|
53
|
+
|
|
54
|
+
def buildLumiMask(runs, lumis):
    """
    Build a lumi-mask dictionary out of parallel run/lumi lists.

    Runs are saved in the spec as a list of integers.  The lumi mask
    associated to each run is saved as a list of strings where each
    string looks like '1,4,23,45' (pairs of inclusive range edges).

    e.g.: runs=['3','4'], lumis=['1,4,23,45', '5,84,234,445'] =>
          lumiMask = {'3':[[1,4],[23,45]],'4':[[5,84],[234,445]]}

    :raises ValueError: if the lists differ in length or any lumi
        string holds an odd number of entries (edges must pair up)
    """
    if len(runs) != len(lumis):
        raise ValueError("runs and lumis must have same length")
    for lumi in lumis:
        if len(lumi.split(',')) % 2:
            raise ValueError("Needs an even number of lumi in each element of lumis list")

    lumiMask = {}
    for run, lumi in zip(runs, lumis):
        edges = [int(item) for item in lumi.split(',')]
        # Pair consecutive edges into [start, end] ranges.
        lumiMask[str(run)] = [list(pair) for pair in zip(edges[::2], edges[1::2])]

    return lumiMask
|
|
78
|
+
|
|
79
|
+
|
|
80
|
+
class WMTaskHelper(TreeHelper):
|
|
81
|
+
"""
|
|
82
|
+
_WMTaskHelper_
|
|
83
|
+
|
|
84
|
+
Util wrapper containing tools & methods for manipulating the WMTask
|
|
85
|
+
data object.
|
|
86
|
+
"""
|
|
87
|
+
|
|
88
|
+
def __init__(self, wmTask):
|
|
89
|
+
TreeHelper.__init__(self, wmTask)
|
|
90
|
+
self.startTime = None
|
|
91
|
+
self.endTime = None
|
|
92
|
+
self.monitoring = None
|
|
93
|
+
|
|
94
|
+
def addTask(self, taskName):
|
|
95
|
+
"""
|
|
96
|
+
_addTask_
|
|
97
|
+
|
|
98
|
+
Add a new task as a subtask with the name provided and
|
|
99
|
+
return it wrapped in a TaskHelper
|
|
100
|
+
|
|
101
|
+
"""
|
|
102
|
+
node = WMTaskHelper(WMTask(taskName))
|
|
103
|
+
self.addNode(node)
|
|
104
|
+
pName = "%s/%s" % (self.getPathName(), taskName)
|
|
105
|
+
node.setPathName(pName)
|
|
106
|
+
return node
|
|
107
|
+
|
|
108
|
+
def taskIterator(self):
|
|
109
|
+
"""
|
|
110
|
+
_taskIterator_
|
|
111
|
+
|
|
112
|
+
return output of nodeIterator(self) wrapped in TaskHelper instance
|
|
113
|
+
|
|
114
|
+
"""
|
|
115
|
+
for x in self.nodeIterator():
|
|
116
|
+
yield WMTaskHelper(x)
|
|
117
|
+
|
|
118
|
+
def childTaskIterator(self):
|
|
119
|
+
"""
|
|
120
|
+
_childTaskIterator_
|
|
121
|
+
|
|
122
|
+
Iterate over all the first generation child tasks.
|
|
123
|
+
"""
|
|
124
|
+
for x in self.firstGenNodeChildIterator():
|
|
125
|
+
yield WMTaskHelper(x)
|
|
126
|
+
|
|
127
|
+
def setPathName(self, pathName):
|
|
128
|
+
"""
|
|
129
|
+
_setPathName_
|
|
130
|
+
|
|
131
|
+
Set the path name of the task within the workload
|
|
132
|
+
Used internally when addin tasks to workloads or subtasks
|
|
133
|
+
|
|
134
|
+
"""
|
|
135
|
+
self.data.pathName = pathName
|
|
136
|
+
|
|
137
|
+
def getPathName(self):
|
|
138
|
+
"""
|
|
139
|
+
_getPathName_
|
|
140
|
+
|
|
141
|
+
get the path name of this task reflecting its
|
|
142
|
+
structure within the workload and task tree
|
|
143
|
+
|
|
144
|
+
"""
|
|
145
|
+
return self.data.pathName
|
|
146
|
+
|
|
147
|
+
def name(self):
|
|
148
|
+
"""
|
|
149
|
+
_name_
|
|
150
|
+
|
|
151
|
+
Retrieve the name of this task.
|
|
152
|
+
"""
|
|
153
|
+
return self.data._internal_name
|
|
154
|
+
|
|
155
|
+
def listPathNames(self):
|
|
156
|
+
"""
|
|
157
|
+
_listPathNames_
|
|
158
|
+
|
|
159
|
+
"""
|
|
160
|
+
for t in self.taskIterator():
|
|
161
|
+
yield t.getPathName()
|
|
162
|
+
|
|
163
|
+
def listNames(self):
|
|
164
|
+
"""
|
|
165
|
+
_listNames_
|
|
166
|
+
Returns a generator with the name of all the children tasks
|
|
167
|
+
"""
|
|
168
|
+
for t in self.taskIterator():
|
|
169
|
+
yield t.name()
|
|
170
|
+
|
|
171
|
+
def listChildNames(self):
|
|
172
|
+
"""
|
|
173
|
+
_listChildNames_
|
|
174
|
+
Return a list with the name of the first generation children tasks
|
|
175
|
+
"""
|
|
176
|
+
names = []
|
|
177
|
+
for t in self.childTaskIterator():
|
|
178
|
+
names.append(t.name())
|
|
179
|
+
return names
|
|
180
|
+
|
|
181
|
+
def makeWorkflow(self):
|
|
182
|
+
"""
|
|
183
|
+
_makeWorkflow_
|
|
184
|
+
|
|
185
|
+
Create a WMBS compatible Workflow structure that represents this
|
|
186
|
+
task and the information contained within it
|
|
187
|
+
|
|
188
|
+
"""
|
|
189
|
+
workflow = DataStructsWorkflow()
|
|
190
|
+
workflow.task = self.getPathName()
|
|
191
|
+
return workflow
|
|
192
|
+
|
|
193
|
+
def steps(self):
|
|
194
|
+
"""get WMStep structure"""
|
|
195
|
+
if self.data.steps.topStepName is None:
|
|
196
|
+
return None
|
|
197
|
+
step = getattr(self.data.steps, self.data.steps.topStepName, None)
|
|
198
|
+
return WMStepHelper(step)
|
|
199
|
+
|
|
200
|
+
def getTopStepName(self):
|
|
201
|
+
"""
|
|
202
|
+
_getTopStepName_
|
|
203
|
+
|
|
204
|
+
Retrieve the name of the top step.
|
|
205
|
+
"""
|
|
206
|
+
return self.data.steps.topStepName
|
|
207
|
+
|
|
208
|
+
def setStep(self, wmStep):
|
|
209
|
+
"""set topStep to be the step instance provided"""
|
|
210
|
+
stepData = wmStep
|
|
211
|
+
if isinstance(wmStep, WMStepHelper):
|
|
212
|
+
stepData = wmStep.data
|
|
213
|
+
stepHelper = wmStep
|
|
214
|
+
else:
|
|
215
|
+
stepHelper = WMStepHelper(wmStep)
|
|
216
|
+
|
|
217
|
+
stepName = stepHelper.name()
|
|
218
|
+
stepHelper.setTopOfTree()
|
|
219
|
+
setattr(self.data.steps, stepName, stepData)
|
|
220
|
+
setattr(self.data.steps, "topStepName", stepName)
|
|
221
|
+
return
|
|
222
|
+
|
|
223
|
+
def listAllStepNames(self, cmsRunOnly=False):
|
|
224
|
+
"""
|
|
225
|
+
_listAllStepNames_
|
|
226
|
+
|
|
227
|
+
Get a list of all the step names contained in this task.
|
|
228
|
+
"""
|
|
229
|
+
step = self.steps()
|
|
230
|
+
if step:
|
|
231
|
+
stepNames = step.allNodeNames()
|
|
232
|
+
if cmsRunOnly:
|
|
233
|
+
stepNames = [step for step in stepNames if step.startswith("cmsRun")]
|
|
234
|
+
return stepNames
|
|
235
|
+
else:
|
|
236
|
+
return []
|
|
237
|
+
|
|
238
|
+
def getStep(self, stepName):
|
|
239
|
+
"""get a particular step from the workflow"""
|
|
240
|
+
if self.data.steps.topStepName is None:
|
|
241
|
+
return None
|
|
242
|
+
topStep = self.steps()
|
|
243
|
+
return topStep.getStep(stepName)
|
|
244
|
+
|
|
245
|
+
def makeStep(self, stepName):
|
|
246
|
+
"""
|
|
247
|
+
_makeStep_
|
|
248
|
+
|
|
249
|
+
create a new WMStep instance, install it as the top step and
|
|
250
|
+
return the reference to the new step wrapped in a StepHelper
|
|
251
|
+
|
|
252
|
+
"""
|
|
253
|
+
newStep = WMStep(stepName)
|
|
254
|
+
self.setStep(newStep)
|
|
255
|
+
return WMStepHelper(newStep)
|
|
256
|
+
|
|
257
|
+
def applyTemplates(self):
|
|
258
|
+
"""
|
|
259
|
+
_applyTemplates_
|
|
260
|
+
|
|
261
|
+
For each step, load the appropriate template and install the default structure
|
|
262
|
+
|
|
263
|
+
TODO: Exception handling
|
|
264
|
+
|
|
265
|
+
"""
|
|
266
|
+
for step in self.steps().nodeIterator():
|
|
267
|
+
stepType = step.stepType
|
|
268
|
+
template = StepFactory.getStepTemplate(stepType)
|
|
269
|
+
template(step)
|
|
270
|
+
|
|
271
|
+
def getStepHelper(self, stepName):
|
|
272
|
+
"""
|
|
273
|
+
_getStepHelper_
|
|
274
|
+
|
|
275
|
+
Get the named step, look up its type specific helper and retrieve
|
|
276
|
+
the step wrapped in the type based helper.
|
|
277
|
+
|
|
278
|
+
"""
|
|
279
|
+
step = self.getStep(stepName)
|
|
280
|
+
stepType = step.stepType()
|
|
281
|
+
template = StepFactory.getStepTemplate(stepType)
|
|
282
|
+
helper = template.helper(step.data)
|
|
283
|
+
return helper
|
|
284
|
+
|
|
285
|
+
def getOutputModulesForTask(self, cmsRunOnly=False):
|
|
286
|
+
"""
|
|
287
|
+
_getOutputModulesForTask_
|
|
288
|
+
|
|
289
|
+
Retrieve all the output modules in the given task.
|
|
290
|
+
If cmsRunOnly is set to True, then return the output modules for
|
|
291
|
+
cmsRun steps only.
|
|
292
|
+
"""
|
|
293
|
+
outputModules = []
|
|
294
|
+
for stepName in self.listAllStepNames(cmsRunOnly):
|
|
295
|
+
outputModules.append(self.getOutputModulesForStep(stepName))
|
|
296
|
+
return outputModules
|
|
297
|
+
|
|
298
|
+
def getIgnoredOutputModulesForTask(self):
|
|
299
|
+
"""
|
|
300
|
+
_getIgnoredOutputModulesForTask_
|
|
301
|
+
|
|
302
|
+
Retrieve the ignored output modules in the given task.
|
|
303
|
+
"""
|
|
304
|
+
ignoredOutputModules = []
|
|
305
|
+
for stepName in self.listAllStepNames():
|
|
306
|
+
stepHelper = self.getStepHelper(stepName)
|
|
307
|
+
ignoredOutputModules.extend(stepHelper.getIgnoredOutputModules())
|
|
308
|
+
return ignoredOutputModules
|
|
309
|
+
|
|
310
|
+
def getOutputModulesForStep(self, stepName):
|
|
311
|
+
"""
|
|
312
|
+
_getOutputModulesForStep_
|
|
313
|
+
|
|
314
|
+
Retrieve all the output modules for the particular step.
|
|
315
|
+
"""
|
|
316
|
+
step = self.getStep(stepName)
|
|
317
|
+
|
|
318
|
+
if hasattr(step.data, "output"):
|
|
319
|
+
if hasattr(step.data.output, "modules"):
|
|
320
|
+
return step.data.output.modules
|
|
321
|
+
|
|
322
|
+
return ConfigSection()
|
|
323
|
+
|
|
324
|
+
def build(self, workingDir):
|
|
325
|
+
"""
|
|
326
|
+
_build_
|
|
327
|
+
|
|
328
|
+
Invoke the build process to create the job in the working dir provided
|
|
329
|
+
|
|
330
|
+
"""
|
|
331
|
+
master = BuildMaster(workingDir)
|
|
332
|
+
master(self)
|
|
333
|
+
return
|
|
334
|
+
|
|
335
|
+
def addEnvironmentVariables(self, envDict):
|
|
336
|
+
"""
|
|
337
|
+
_addEnvironmentVariables_
|
|
338
|
+
|
|
339
|
+
add a key = value style setting to the environment for this task and all
|
|
340
|
+
its children
|
|
341
|
+
"""
|
|
342
|
+
for key, value in viewitems(envDict):
|
|
343
|
+
setattr(self.data.environment, key, value)
|
|
344
|
+
for task in self.childTaskIterator():
|
|
345
|
+
task.addEnvironmentVariables(envDict)
|
|
346
|
+
return
|
|
347
|
+
|
|
348
|
+
def setOverrideCatalog(self, tfcFile):
|
|
349
|
+
"""
|
|
350
|
+
_setOverrideCatalog_
|
|
351
|
+
|
|
352
|
+
Used for setting overrideCatalog option for each step in the task.
|
|
353
|
+
"""
|
|
354
|
+
for step in self.steps().nodeIterator():
|
|
355
|
+
step = CoreHelper(step)
|
|
356
|
+
step.setOverrideCatalog(tfcFile)
|
|
357
|
+
for task in self.childTaskIterator():
|
|
358
|
+
task.setOverrideCatalog(tfcFile)
|
|
359
|
+
return
|
|
360
|
+
|
|
361
|
+
def getEnvironmentVariables(self):
    """
    _getEnvironmentVariables_

    Return a dictionary of every environment variable defined on this
    task's environment section.
    """
    return self.data.environment.dictionary_()
|
|
368
|
+
|
|
369
|
+
def setupEnvironment(self):
    """
    _setupEnvironment_

    Export the environment variables configured on this task into
    os.environ for the current process. Sub-sections of the environment
    ConfigSection are not supported and are silently skipped.

    No-op when the task has no environment section.
    """
    if not hasattr(self.data, 'environment'):
        # No environment to setup, pass
        return

    envDict = self.data.environment.dictionary_()

    for key, value in viewitems(envDict):
        # At this point we do not support the setting of sub-sections
        # for environment variables.
        # NOTE: isinstance replaces the old, fragile
        # str(value.__class__) == "<class 'WMCore.Configuration.ConfigSection'>"
        # comparison, which missed subclasses and depended on the exact
        # repr of the class object.
        if isinstance(value, ConfigSection):
            continue
        os.environ[key] = value

    return
|
|
392
|
+
|
|
393
|
+
def execute(self, wmbsJob):
    """
    _execute_

    Run every step of this task for the given WMBS job, recording wall
    clock start and end times on the task.
    """
    self.startTime = time.time()
    self.setupEnvironment()
    executor = ExecuteMaster()
    executor(self, wmbsJob)
    self.endTime = time.time()
    return
|
|
406
|
+
|
|
407
|
+
def setInputReference(self, stepRef, **extras):
    """
    _setInputReference_

    Record which step of another task provides input to this one: the
    step identifier is stored as input.inputStep, and any extra keyword
    settings are stored alongside it on the input section.
    """
    self.data.input.inputStep = SpecUtils.stepIdentifier(stepRef)
    for extraKey, extraVal in viewitems(extras):
        setattr(self.data.input, extraKey, extraVal)
    return
|
|
424
|
+
|
|
425
|
+
def setInputStep(self, stepName):
    """
    _setInputStep_

    Record the name of the step whose output is used as input for this
    task.
    """
    self.data.input.inputStep = stepName
    return
|
|
434
|
+
|
|
435
|
+
def getInputStep(self):
    """
    _getInputStep_

    Return the name of the input step, or None when none is set.
    """
    return getattr(self.data.input, "inputStep", None)
|
|
442
|
+
|
|
443
|
+
def inputReference(self):
    """
    _inputReference_

    Return the input ConfigSection describing this task's input
    reference.
    """
    return self.data.input
|
|
451
|
+
|
|
452
|
+
def setFirstEventAndLumi(self, firstEvent, firstLumi):
    """
    _setFirstEventAndLumi_

    Store an arbitrary first event and first lumi number on the
    production section, creating that section on demand.
    Only used by production workflows.
    """
    if not hasattr(self.data, "production"):
        self.data._section("production")
    self.data.production.firstEvent = firstEvent
    self.data.production.firstLumi = firstLumi
|
|
464
|
+
|
|
465
|
+
def getFirstEvent(self):
    """
    _getFirstEvent_

    Return the first event to produce for the task, defaulting to 1
    when no production section or firstEvent setting exists.
    """
    if hasattr(self.data, "production"):
        # Fix: previously this tested hasattr(..., "firstLumi") while
        # returning firstEvent, which could raise AttributeError if only
        # firstLumi had been set; guard on the attribute we return.
        if hasattr(self.data.production, "firstEvent"):
            return self.data.production.firstEvent
    return 1
|
|
475
|
+
|
|
476
|
+
def getFirstLumi(self):
    """
    _getFirstLumi_

    Return the first lumi number to produce for the task, defaulting
    to 1.
    """
    production = getattr(self.data, "production", None)
    if production is not None and hasattr(production, "firstLumi"):
        return production.firstLumi
    return 1
|
|
486
|
+
|
|
487
|
+
def setSplittingParameters(self, **params):
    """
    _setSplittingParameters_

    Store each keyword argument as a job splitting parameter on the
    input.splitting section.
    """
    for paramKey, paramVal in viewitems(params):
        setattr(self.data.input.splitting, paramKey, paramVal)
    return
|
|
497
|
+
|
|
498
|
+
def setSplittingAlgorithm(self, algoName, **params):
    """
    _setSplittingAlgorithm_

    Set the splitting algorithm name and arguments. Clear out any old
    splitting parameters while preserving the parameters for ACDC
    resubmission which are:
    collectionName, filesetName, couchURL, couchDB, owner, group

    This also needs to preserve the parameter we use to set the initial
    LFN counter, whether or not we merge across runs and the runWhitelist:
    initial_lfn_counter
    merge_across_runs
    runWhitelist

    Preserve parameters which can be set up at request creation and if not
    specified should remain unchanged, at the moment these are:
    include_parents
    lheInputFiles

    Also preserve the performance section.
    """
    # Save the ACDC/resubmission parameters before the splitting
    # section is thrown away below.
    setACDCParams = {}
    for paramName in ["collectionName", "filesetName", "couchURL",
                      "couchDB", "owner", "group", "initial_lfn_counter",
                      "merge_across_runs", "runWhitelist"]:
        if hasattr(self.data.input.splitting, paramName):
            setACDCParams[paramName] = getattr(self.data.input.splitting,
                                               paramName)
    # Save the request-creation-time parameters as well.
    preservedParams = {}
    for paramName in ["lheInputFiles", "include_parents", "deterministicPileup"]:
        if hasattr(self.data.input.splitting, paramName):
            preservedParams[paramName] = getattr(self.data.input.splitting,
                                                 paramName)
    # Keep the whole performance sub-section (restored at the end).
    performanceConfig = getattr(self.data.input.splitting, "performance", None)

    # Drop and recreate the splitting section from scratch.
    delattr(self.data.input, "splitting")
    self.data.input.section_("splitting")
    self.data.input.splitting.section_("performance")

    # Re-apply in increasing precedence: preserved params, then the new
    # params, then the ACDC params (which must win).
    setattr(self.data.input.splitting, "algorithm", algoName)
    self.setSplittingParameters(**preservedParams)
    self.setSplittingParameters(**params)
    self.setSplittingParameters(**setACDCParams)
    if performanceConfig is not None:
        self.data.input.splitting.performance = performanceConfig
    return
|
|
545
|
+
|
|
546
|
+
def updateSplittingParameters(self, algoName, **params):
    """
    _updateSplittingParameters_

    Switch the splitting algorithm and overlay the given parameters
    without clearing the existing ones; the performance sub-section is
    preserved across the update.

    :param algoName: string Algorithm name
    :param params: splitting parameters
    :return: None
    """
    savedPerformance = getattr(self.data.input.splitting, "performance", None)
    self.data.input.splitting.algorithm = algoName
    self.data.input.splitting.section_("performance")
    self.setSplittingParameters(**params)
    if savedPerformance is not None:
        self.data.input.splitting.performance = savedPerformance
    return
|
|
562
|
+
|
|
563
|
+
def jobSplittingAlgorithm(self):
    """
    _jobSplittingAlgorithm_

    Return the configured job splitting algorithm name, or None.
    """
    return getattr(self.data.input.splitting, "algorithm", None)
|
|
570
|
+
|
|
571
|
+
def jobSplittingParameters(self, performance=True):
    """
    _jobSplittingParameters_

    Return the job splitting parameters as a dict, merged with the site
    white/black lists and the 'trust site lists' flags (those are passed
    on to the job splitting code), plus any run white/black list coming
    from the input dataset. When performance is True the performance
    sub-section is included in the result.
    """
    splitting = getattr(self.data.input, "splitting")
    if performance:
        params = splitting.dictionary_whole_tree_()
    else:
        params = splitting.dictionary_()
        params.pop('performance', None)

    trustFlags = self.getTrustSitelists()
    params["siteWhitelist"] = self.siteWhitelist()
    params["siteBlacklist"] = self.siteBlacklist()
    params["trustSitelists"] = trustFlags.get('trustlists')
    params["trustPUSitelists"] = trustFlags.get('trustPUlists')

    # Run lists from the input dataset only apply when the splitting
    # parameters do not already carry their own.
    if "runWhitelist" not in params and self.inputRunWhitelist() is not None:
        params["runWhitelist"] = self.inputRunWhitelist()
    if "runBlacklist" not in params and self.inputRunBlacklist() is not None:
        params["runBlacklist"] = self.inputRunBlacklist()

    return params
|
|
598
|
+
|
|
599
|
+
def setJobResourceInformation(self, timePerEvent=None, sizePerEvent=None, memoryReq=None):
    """
    _setJobResourceInformation_

    Set the values to estimate the required computing resources for a job,
    the three key values are main memory usage, time per processing unit (e.g. time per event) and
    disk usage per processing unit (e.g. size per event).

    Each argument may be a plain value or a dict keyed by task name, in
    which case this task's own entry (possibly absent) is used.
    """
    if self.taskType() in ["Merge", "Cleanup", "LogCollect"]:
        # don't touch job requirements for these task types
        return

    performanceParams = getattr(self.data.input.splitting, "performance")

    # When passed per-task dicts, pick this task's value (may be None).
    timePerEvent = timePerEvent.get(self.name()) if isinstance(timePerEvent, dict) else timePerEvent
    sizePerEvent = sizePerEvent.get(self.name()) if isinstance(sizePerEvent, dict) else sizePerEvent
    memoryReq = memoryReq.get(self.name()) if isinstance(memoryReq, dict) else memoryReq

    # Each "x or getattr(...)" keeps the previously stored value when
    # the new one is falsy (None/0); the attribute is only written when
    # either a new value was given or one already exists.
    if timePerEvent or getattr(performanceParams, "timePerEvent", None):
        performanceParams.timePerEvent = timePerEvent or getattr(performanceParams, "timePerEvent")
    if sizePerEvent or getattr(performanceParams, "sizePerEvent", None):
        performanceParams.sizePerEvent = sizePerEvent or getattr(performanceParams, "sizePerEvent")
    if memoryReq or getattr(performanceParams, "memoryRequirement", None):
        performanceParams.memoryRequirement = memoryReq or getattr(performanceParams, "memoryRequirement")
        # if we change memory requirements, then we must change MaxPSS as well
        self.setMaxPSS(performanceParams.memoryRequirement)

    return
|
|
627
|
+
|
|
628
|
+
def addGenerator(self, generatorName, **settings):
    """
    _addGenerator_

    Register a generator section on the task (creating the top-level
    generators section on demand) and attach the given settings to it.
    """
    if 'generators' not in self.data.listSections_():
        self.data.section_('generators')
    if generatorName not in self.data.generators.listSections_():
        self.data.generators.section_(generatorName)

    generatorSection = getattr(self.data.generators, generatorName)
    TreeHelper(generatorSection).addValue(settings)
    return
|
|
643
|
+
|
|
644
|
+
def listGenerators(self):
    """
    _listGenerators_

    Return the names of all generator sections configured on this task,
    or an empty list when none exist.
    """
    generators = getattr(self.data, "generators", None)
    return [] if generators is None else generators.listSections_()
|
|
653
|
+
|
|
654
|
+
def getGeneratorSettings(self, generatorName):
    """
    _getGeneratorSettings_

    Return the settings attached to the named generator as a plain
    dict, with the '<generatorName>.' prefix stripped from each key.
    Returns an empty dict when the generator (or the generators
    section) is absent.
    """
    generators = getattr(self.data, "generators", None)
    if generators is None:
        return {}
    generator = getattr(generators, generatorName, None)
    if generator is None:
        return {}

    flattened = TreeHelper(generator).pythoniseDict(sections=False)
    prefix = '%s.' % generatorName
    return {fullKey.split(prefix)[1]: value
            for fullKey, value in viewitems(flattened)}
|
|
673
|
+
|
|
674
|
+
def addInputACDC(self, serverUrl, databaseName, collectionName,
                 filesetName):
    """
    _addInputACDC_

    Store the ACDC (resubmission) input configuration for this task.
    """
    self.data.input.section_("acdc")
    acdc = self.data.input.acdc
    acdc.server = serverUrl
    acdc.database = databaseName
    acdc.collection = collectionName
    acdc.fileset = filesetName
    return
|
|
687
|
+
|
|
688
|
+
def getInputACDC(self):
    """
    _getInputACDC_

    Return the ACDC input configuration as a dict, or None when no ACDC
    section has been configured.
    """
    acdc = getattr(self.data.input, "acdc", None)
    if acdc is None:
        return None

    return {"server": acdc.server,
            "collection": acdc.collection,
            "fileset": acdc.fileset,
            "database": acdc.database}
|
|
701
|
+
|
|
702
|
+
def addInputDataset(self, **options):
    """
    _addInputDataset_

    Add details of an input dataset to this Task.
    This dataset will be used as input for the first step
    in the task.

    Any previously configured input dataset section is replaced
    wholesale.

    options should contain at least:
    - primary - primary dataset name
    - processed - processed dataset name
    - tier - data tier name

    optional args:
    - name - dataset name
    - dbsurl - dbs url if not global
    - block_whitelist - list of whitelisted fileblocks
    - block_blacklist - list of blacklisted fileblocks
    - run_whitelist - list of whitelist runs
    - run_blacklist - list of blacklist runs

    Any unrecognised option is stored verbatim on the dataset section.

    Raises RuntimeError when primary, processed or tier is missing.
    """
    # Fresh dataset section with empty defaults.
    self.data.input.section_("dataset")
    self.data.input.dataset.name = None
    self.data.input.dataset.dbsurl = None
    self.data.input.dataset.section_("blocks")
    self.data.input.dataset.blocks.whitelist = []
    self.data.input.dataset.blocks.blacklist = []
    self.data.input.dataset.section_("runs")
    self.data.input.dataset.runs.whitelist = []
    self.data.input.dataset.runs.blacklist = []

    # The three mandatory pieces; pop so the loop below only sees the
    # optional leftovers.
    try:
        self.data.input.dataset.primary = options.pop('primary')
        self.data.input.dataset.processed = options.pop('processed')
        self.data.input.dataset.tier = options.pop('tier')
    except KeyError:
        raise RuntimeError("Primary, Processed and Tier must be set")

    # Dispatch the remaining options to their dedicated setters.
    for opt, arg in viewitems(options):
        if opt == 'block_blacklist':
            self.setInputBlockBlacklist(arg)
        elif opt == 'block_whitelist':
            self.setInputBlockWhitelist(arg)
        elif opt == 'dbsurl':
            self.data.input.dataset.dbsurl = arg
        elif opt == "run_whitelist":
            self.setInputRunWhitelist(arg)
        elif opt == "run_blacklist":
            self.setInputRunBlacklist(arg)
        else:
            # Unknown option: stored as-is on the dataset section.
            setattr(self.data.input.dataset, opt, arg)

    return
|
|
755
|
+
|
|
756
|
+
def setInputBlockWhitelist(self, blockWhitelist):
    """
    _setInputBlockWhitelist_

    Store the block white list on the input dataset; addInputDataset()
    must have been called first.
    """
    self.data.input.dataset.blocks.whitelist = blockWhitelist
    return
|
|
765
|
+
|
|
766
|
+
def inputBlockWhitelist(self):
    """
    _inputBlockWhitelist_

    Return the block white list of the input dataset, or None when no
    input dataset is configured.
    """
    dataset = getattr(self.data.input, "dataset", None)
    if dataset is None:
        return None
    return dataset.blocks.whitelist
|
|
776
|
+
|
|
777
|
+
def setInputBlockBlacklist(self, blockBlacklist):
    """
    _setInputBlockBlacklist_

    Store the block black list on the input dataset; addInputDataset()
    must have been called first.
    """
    self.data.input.dataset.blocks.blacklist = blockBlacklist
    return
|
|
786
|
+
|
|
787
|
+
def inputBlockBlacklist(self):
    """
    _inputBlockBlacklist_

    Return the block black list of the input dataset, or None when no
    input dataset is configured.
    """
    dataset = getattr(self.data.input, "dataset", None)
    if dataset is None:
        return None
    return dataset.blocks.blacklist
|
|
797
|
+
|
|
798
|
+
def setInputRunWhitelist(self, runWhitelist):
    """
    _setInputRunWhitelist_

    Store the run white list on the input dataset; addInputDataset()
    must have been called first.
    """
    self.data.input.dataset.runs.whitelist = runWhitelist
    return
|
|
807
|
+
|
|
808
|
+
def inputRunWhitelist(self):
    """
    _inputRunWhitelist_

    Return the run white list of the input dataset, or None when no
    input dataset is configured.
    """
    dataset = getattr(self.data.input, "dataset", None)
    if dataset is None:
        return None
    return dataset.runs.whitelist
|
|
818
|
+
|
|
819
|
+
def setInputRunBlacklist(self, runBlacklist):
    """
    _setInputRunBlacklist_

    Store the run black list on the input dataset; addInputDataset()
    must have been called first.
    """
    self.data.input.dataset.runs.blacklist = runBlacklist
    return
|
|
828
|
+
|
|
829
|
+
def inputRunBlacklist(self):
    """
    _inputRunBlacklist_

    Return the run black list of the input dataset, or None when no
    input dataset is configured.
    """
    dataset = getattr(self.data.input, "dataset", None)
    if dataset is None:
        return None
    return dataset.runs.blacklist
|
|
839
|
+
|
|
840
|
+
def addProduction(self, **options):
    """
    _addProduction_

    Store production-job settings on the production section, which is
    created on demand.

    optional keys include:
    - totalevents - total events in dataset
    """
    if not hasattr(self.data, "production"):
        self.data.section_("production")

    for optName, optValue in viewitems(options):
        setattr(self.data.production, optName, optValue)
|
|
857
|
+
|
|
858
|
+
def inputDataset(self):
    """
    _inputDataset_

    Return the input.dataset ConfigSection of this task, or None when
    no input dataset is configured.
    """
    return getattr(self.data.input, "dataset", None)
|
|
866
|
+
|
|
867
|
+
def getInputDatasetPath(self):
    """
    _getInputDatasetPath_

    Return the input dataset path (its 'name' attribute), or None when
    no input dataset is configured.
    """
    dataset = getattr(self.data.input, 'dataset', None)
    if dataset is None:
        return None
    return getattr(dataset, 'name', None)
|
|
878
|
+
|
|
879
|
+
def setInputPileupDatasets(self, dsetName):
    """
    _setInputPileupDatasets_

    Add one pileup dataset (string) or several (list) to this task,
    keeping the stored list duplicate-free. A task may carry multiple
    pileup datasets (possibly multiple CMSSW steps).

    Raises ValueError when dsetName is neither a list nor a string.
    """
    self.data.input.section_("pileup")
    pileup = self.data.input.pileup
    if not hasattr(pileup, "datasets"):
        pileup.datasets = []

    if isinstance(dsetName, list):
        pileup.datasets.extend(dsetName)
    elif isinstance(dsetName, (newstr, bytes)):
        pileup.datasets.append(dsetName)
    else:
        raise ValueError("Pileup dataset must be either a list or a string (unicode or bytes)")
    # deduplicate, storing back as a plain list
    pileup.datasets = list(set(pileup.datasets))
|
|
898
|
+
|
|
899
|
+
def getInputPileupDatasets(self):
    """
    _getInputPileupDatasets_

    Return the list of pileup dataset names configured on this task
    (empty when none).
    """
    pileup = getattr(self.data.input, 'pileup', None)
    if pileup is None:
        return []
    return getattr(pileup, 'datasets', [])
|
|
908
|
+
|
|
909
|
+
def siteWhitelist(self):
    """
    _siteWhitelist_

    Return the site white list configured on this task.
    """
    return self.data.constraints.sites.whitelist
|
|
916
|
+
|
|
917
|
+
def setSiteWhitelist(self, siteWhitelist):
    """
    _setSiteWhitelist_

    Store the site white list for the task.
    """
    self.data.constraints.sites.whitelist = siteWhitelist
    return
|
|
925
|
+
|
|
926
|
+
def siteBlacklist(self):
    """
    _siteBlacklist_

    Return the site black list configured on this task.
    """
    return self.data.constraints.sites.blacklist
|
|
933
|
+
|
|
934
|
+
def setSiteBlacklist(self, siteBlacklist):
    """
    _setSiteBlacklist_

    Store the site black list for the task.
    """
    self.data.constraints.sites.blacklist = siteBlacklist
    return
|
|
942
|
+
|
|
943
|
+
def getTrustSitelists(self):
    """
    _getTrustSitelists_

    Return the input and pileup 'trust site lists' flags as a dict.
    Both default to False for requests written before these flags
    existed (backward compatibility).
    """
    sites = self.data.constraints.sites
    return {'trustlists': getattr(sites, 'trustlists', False),
            'trustPUlists': getattr(sites, 'trustPUlists', False)}
|
|
952
|
+
|
|
953
|
+
def setTrustSitelists(self, trustSitelists, trustPUSitelists):
    """
    _setTrustSitelists_

    Store the input and pileup 'trust site lists' flags on the task.
    """
    sites = self.data.constraints.sites
    sites.trustlists = trustSitelists
    sites.trustPUlists = trustPUSitelists
    return
|
|
962
|
+
|
|
963
|
+
def listOutputDatasetsAndModules(self):
    """
    _listOutputDatasetsAndModules_

    Return a list of {"outputModule": ..., "outputDataset": ...} dicts
    for every CMSSW step of this task whose output is kept.
    """
    datasets = []
    for stepName in self.listAllStepNames():
        stepHelper = self.getStepHelper(stepName)

        # Steps whose output is flagged keep == False contribute nothing.
        if not getattr(stepHelper.data.output, "keep", True):
            continue
        if stepHelper.stepType() != "CMSSW":
            continue

        for moduleName in stepHelper.listOutputModules():
            module = stepHelper.getOutputModule(moduleName)
            dsPath = "/%s/%s/%s" % (module.primaryDataset,
                                    module.processedDataset,
                                    module.dataTier)
            datasets.append({"outputModule": moduleName,
                             "outputDataset": dsPath})

    return datasets
|
|
986
|
+
|
|
987
|
+
def setSubscriptionInformation(self, custodialSites=None, nonCustodialSites=None,
                               priority="Low", primaryDataset=None,
                               useSkim=False, isSkim=False,
                               dataTier=None, deleteFromSource=False,
                               datasetLifetime=None):
    """
    _setSubscriptionsInformation_

    Set the subscription information for this task's datasets
    The subscriptions information is structured as follows:
    data.subscriptions.outputSubs is a list with the output section names (1 per dataset)
    data.subscriptions.<outputSection>.dataset
    data.subscriptions.<outputSection>.outputModule
    data.subscriptions.<outputSection>.custodialSites
    data.subscriptions.<outputSection>.nonCustodialSites
    data.subscriptions.<outputSection>.priority

    The filters arguments allow to define a dataTier and primaryDataset. Only datasets
    matching those values will be configured.
    """
    custodialSites = custodialSites or []
    nonCustodialSites = nonCustodialSites or []

    if not hasattr(self.data, "subscriptions"):
        self.data.section_("subscriptions")
        self.data.subscriptions.outputSubs = []

    outputDatasets = self.listOutputDatasetsAndModules()

    # One numbered "output<N>" section per dataset that passes the
    # filters below.
    for entry in enumerate(outputDatasets, start=1):
        subSectionName = "output%s" % entry[0]
        outputDataset = entry[1]["outputDataset"]
        outputModule = entry[1]["outputModule"]

        # Split "/<primary>/<processed>/<tier>".
        dsSplit = outputDataset.split('/')
        primDs = dsSplit[1]
        tier = dsSplit[3]
        procDsSplit = dsSplit[2].split('-')
        # NOTE(review): a 4-part processed dataset name is taken to mean
        # a skim dataset — confirm this naming convention holds.
        skim = (len(procDsSplit) == 4)

        # Optional filters; non-matching datasets get no subscription.
        if primaryDataset and primDs != primaryDataset:
            continue
        if useSkim and isSkim != skim:
            continue
        if dataTier and tier != dataTier:
            continue

        self.data.subscriptions.outputSubs.append(subSectionName)
        outputSection = self.data.subscriptions.section_(subSectionName)
        outputSection.dataset = outputDataset
        outputSection.outputModule = outputModule
        outputSection.custodialSites = custodialSites
        outputSection.nonCustodialSites = nonCustodialSites
        outputSection.priority = priority
        outputSection.deleteFromSource = deleteFromSource
        outputSection.datasetLifetime = datasetLifetime

    return
|
|
1045
|
+
|
|
1046
|
+
def getSubscriptionInformation(self):
    """
    _getSubscriptionInformation_

    Return the subscription configuration keyed by dataset name:
    {<dataset>: {"CustodialSites": [...],
                 "NonCustodialSites": [...],
                 "Priority": "Low",
                 "DeleteFromSource": bool,
                 "DatasetLifetime": int}}
    Empty dict when no subscriptions section exists.
    """
    if not hasattr(self.data, "subscriptions"):
        return {}

    subKeyName = 'outputSubs'

    subInformation = {}
    for sectionName in getattr(self.data.subscriptions, subKeyName):
        section = getattr(self.data.subscriptions, sectionName)
        subInformation[section.dataset] = {
            "CustodialSites": section.custodialSites,
            "NonCustodialSites": section.nonCustodialSites,
            "Priority": section.priority,
            # These might not be present in all specs
            "DeleteFromSource": getattr(section, "deleteFromSource", False),
            # Spec assigned for T0 ContainerRules
            "DatasetLifetime": getattr(section, "datasetLifetime", 0)}
    return subInformation
|
|
1076
|
+
|
|
1077
|
+
def parentProcessingFlag(self):
    """
    _parentProcessingFlag_

    Return whether two-file (parent) input processing is enabled, based
    on the 'include_parents' splitting parameter (defaults to False).
    """
    return self.jobSplittingParameters().get("include_parents", False)
|
|
1084
|
+
|
|
1085
|
+
def totalEvents(self):
    """
    _totalEvents_

    Return the total number of events configured for this (production)
    task as an int.
    """
    # TODO: save the total events for the production job
    return int(self.data.production.totalEvents)
    # return self.data.input.dataset.totalEvents
|
|
1094
|
+
|
|
1095
|
+
def dbsUrl(self):
    """
    _dbsUrl_

    Return the DBS url set on the input dataset, or None when no input
    dataset (or no url) is configured.
    """
    dataset = getattr(self.data.input, "dataset", False)
    if dataset:
        return getattr(dataset, "dbsurl", None)
    return None
|
|
1105
|
+
|
|
1106
|
+
def setTaskType(self, taskType):
    """
    _setTaskType_

    Set the type field of this task.
    """
    self.data.taskType = taskType
|
|
1111
|
+
|
|
1112
|
+
def taskType(self):
    """
    _taskType_

    Return the task's type field.
    """
    return self.data.taskType
|
|
1119
|
+
|
|
1120
|
+
def completeTask(self, jobLocation, reportName):
    """
    _completeTask_

    Combine all the logs from all the steps in the task to a single log

    If necessary, output to Dashboard

    Returns the combined (and persisted) final Report object.
    """
    from WMCore.FwkJobReport.Report import Report

    finalReport = Report()
    # We left the master report at the pilot scratch area level
    testPath = os.path.join(jobLocation, '../../', reportName)
    logging.info("Looking for master report at %s", testPath)
    if os.path.exists(testPath):
        logging.info(" found it!")
        # If a report already exists, we load it and
        # append our steps to it
        finalReport.load(testPath)
    taskSteps = self.listAllStepNames()
    for taskStep in taskSteps:
        # Each executed step leaves a pickled report in its own subdir.
        reportPath = os.path.join(jobLocation, taskStep, "Report.pkl")
        logging.info("Looking for a taskStep report at %s", reportPath)
        if os.path.isfile(reportPath):
            logging.info(" found it!")
            stepReport = Report()
            stepReport.unpersist(reportPath, taskStep)
            finalReport.setStep(taskStep, stepReport.retrieveStep(taskStep))
            # Keep the last non-empty log URL seen across steps.
            logURL = stepReport.getLogURL()
            if logURL:
                finalReport.setLogURL(logURL)
        else:
            msg = " failed to find it."
            msg += "Files in the directory are:\n%s" % os.listdir(os.path.join(jobLocation, taskStep))
            logging.error(msg)
            # Then we have a missing report
            # This should raise an alarm bell, as per Steve's request
            # TODO: Change error code
            finalReport.addStep(reportname=taskStep, status=1)
            finalReport.addError(stepName=taskStep, exitCode=99996, errorType="ReportManipulatingError",
                                 errorDetails="Failed to find a step report for %s!" % taskStep)

    finalReport.data.completed = True
    finalReport.persist(reportName)

    return finalReport
|
|
1166
|
+
|
|
1167
|
+
def taskLogBaseLFN(self):
    """
    _taskLogBaseLFN_

    Return the base LFN for the task's log archive file, defaulting to
    /store/temp/WMAgent/unmerged.
    """
    return getattr(self.data, "logBaseLFN", "/store/temp/WMAgent/unmerged")
|
|
1174
|
+
|
|
1175
|
+
def setTaskLogBaseLFN(self, logBaseLFN):
    """
    _setTaskLogBaseLFN_

    Store *logBaseLFN* as the base LFN for this task's log archive file.
    """
    self.data.logBaseLFN = logBaseLFN
|
|
1183
|
+
|
|
1184
|
+
def addNotification(self, target):
    """
    _addNotification_

    Register *target* to be notified when the workflow completes.
    """
    self.data.notifications.targets.append(target)
|
|
1193
|
+
|
|
1194
|
+
def getNotifications(self):
    """
    _getNotifications_

    Return every target registered for workflow-completion notification.
    """
    return self.data.notifications.targets
|
|
1202
|
+
|
|
1203
|
+
def _setPerformanceMonitorConfig(self):
    """
    Ensure a watchdog/PerformanceMonitor config section exists for this
    task, creating and registering it when missing.
    """
    # Already initialised on a previous call; nothing to do.
    if self.monitoring is not None:
        return

    # Create (or fetch) the watchdog section and make sure the
    # PerformanceMonitor monitor is registered in its monitors list.
    self.monitoring = self.data.section_("watchdog")
    if not hasattr(self.data.watchdog, 'monitors'):
        self.data.watchdog.monitors = []
    if 'PerformanceMonitor' not in self.monitoring.monitors:
        self.monitoring.monitors.append('PerformanceMonitor')
        self.monitoring.section_("PerformanceMonitor")
    return
|
|
1217
|
+
|
|
1218
|
+
def setMaxPSS(self, maxPSS):
    """
    _setMaxPSS_

    Set MaxPSS performance monitoring for this task.
    :param maxPSS: maximum Proportional Set Size (PSS) memory consumption in MiB
    """
    if self.taskType() in ["Merge", "Cleanup", "LogCollect"]:
        # keep the default settings (from StdBase) for these task types
        return

    # TaskChain-style argument: a dict keyed by task name
    if isinstance(maxPSS, dict):
        maxPSS = maxPSS.get(self.name(), None)

    if maxPSS:
        self._setPerformanceMonitorConfig()
        self.monitoring.PerformanceMonitor.maxPSS = int(maxPSS)
        # propagate the same limit to the whole subtree of child tasks
        for task in self.childTaskIterator():
            task.setMaxPSS(maxPSS)
    return
|
|
1238
|
+
|
|
1239
|
+
def setPerformanceMonitor(self, softTimeout=None, gracePeriod=None):
    """
    _setPerformanceMonitor_

    Set/Update the performance monitor options for the task.

    :param softTimeout: soft timeout in seconds; when set, also used as the
        base for the hard timeout.
    :param gracePeriod: seconds added to *softTimeout* to derive the hard
        timeout. Ignored unless *softTimeout* is also provided (previously
        a gracePeriod without a softTimeout raised a TypeError on
        ``None + int``).
    """
    # make sure there is a PerformanceMonitor section in the task
    self._setPerformanceMonitorConfig()

    if softTimeout:
        self.monitoring.PerformanceMonitor.softTimeout = int(softTimeout)
        # hardTimeout is only meaningful relative to a soft timeout
        if gracePeriod:
            self.monitoring.PerformanceMonitor.hardTimeout = int(softTimeout + gracePeriod)

    return
|
|
1254
|
+
|
|
1255
|
+
def getSwVersion(self, allSteps=False):
    """
    _getSwVersion_

    Return the CMSSW release of the first CMSSW/LogCollect step in this
    task, or — when *allSteps* is True — a list with the release of every
    such step.

    :param allSteps: collect releases from all matching steps
    :return: a release string, or a list of release strings
    """
    releases = []
    for name in self.listAllStepNames():
        helper = self.getStepHelper(name)
        if helper.stepType() not in ("CMSSW", "LogCollect"):
            continue
        if not allSteps:
            return helper.getCMSSWVersion()
        releases.append(helper.getCMSSWVersion())
    return releases
|
|
1273
|
+
|
|
1274
|
+
def getScramArch(self, allSteps=False):
    """
    _getScramArch_

    Return the scram architecture of the first CMSSW/LogCollect step, or
    — when *allSteps* is True — a list with the scramArch of every such
    step in this task.
    """
    arches = []
    for name in self.listAllStepNames():
        helper = self.getStepHelper(name)
        if helper.stepType() not in ("CMSSW", "LogCollect"):
            continue
        if not allSteps:
            return helper.getScramArch()
        arches.append(helper.getScramArch())
    return arches
|
|
1290
|
+
|
|
1291
|
+
def setPrimarySubType(self, subType):
    """
    _setPrimarySubType_

    Record the subType WorkQueue should use for the primary subscription.
    """
    self.data.parameters.primarySubType = subType
|
|
1301
|
+
|
|
1302
|
+
def getPrimarySubType(self):
    """
    _getPrimarySubType_

    Return the primary subscription subType, falling back to the task
    type when none was explicitly set.
    """
    return getattr(self.data.parameters, 'primarySubType', self.taskType())
|
|
1312
|
+
|
|
1313
|
+
def getConfigCacheIDs(self):
    """
    _getConfigCacheIDs_

    Collect the ConfigCacheID of every constituent step that defines one.

    :return: list of non-empty ConfigCacheID values
    """
    allIDs = (self.getStepHelper(stepName).getConfigCacheID()
              for stepName in self.listAllStepNames())
    return [cid for cid in allIDs if cid]
|
|
1327
|
+
|
|
1328
|
+
def getPhysicsTaskType(self):
    """
    Return the physics task type string, or None when it was never set.

    :return: str or None
    """
    return getattr(self.data, 'physicsTaskType', None)
|
|
1334
|
+
|
|
1335
|
+
def getStepPhysicsTypes(self):
    """
    Return the physics type of every cmsRun step in this task, using
    "UNKNOWN" for steps that do not define one.

    :return: list of strings
    """
    return [self.getStepHelper(stepName).getPhysicsType() or "UNKNOWN"
            for stepName in self.listAllStepNames(cmsRunOnly=True)]
|
|
1349
|
+
|
|
1350
|
+
def setPhysicsTaskType(self):
    """
    Derive and store an expanded physics task type from the step physics
    types, refining the generic "Production"/"Processing" task type.
    For MC the cmsRun physics types are joined (e.g. "GEN,SIM"); for data
    a single "DataProcessing" value is stored; any other task type stores
    None.
    """
    if self.taskType() not in ("Processing", "Production"):
        # Other task types are not physics types
        self.data.physicsTaskType = None
        return

    stepTypes = self.getStepPhysicsTypes()
    if "DataProcessing" in stepTypes:
        # For data, a single DataProcessing label covers the task
        self.data.physicsTaskType = "DataProcessing"
    else:
        # For MC, join all physics steps
        self.data.physicsTaskType = ",".join(stepTypes)
|
|
1374
|
+
|
|
1375
|
+
def setProcessingVersion(self, procVer, parentProcessingVersion=0, stepChainMap=False):
    """
    _setProcessingVersion_

    Set the task processing version and propagate it to all child tasks.

    :param procVer: int, or dict keyed by task name (TaskChain) / step name
        (StepChain, together with stepChainMap)
    :param parentProcessingVersion: fallback value inherited from the parent
    :param stepChainMap: StepChain step mapping, or False for other workflows
    """
    if isinstance(procVer, dict) and stepChainMap:
        # StepChain: resolve through the step mapping and also push the
        # values down onto the individual WMSteps.
        taskProcVer = self._getStepValue(procVer, parentProcessingVersion)
        self._setStepProperty("ProcessingVersion", procVer, stepChainMap)
    elif isinstance(procVer, dict):
        taskProcVer = procVer.get(self.name(), parentProcessingVersion)
        if taskProcVer is None:
            # Fallback (e.g. ACDC tasks): match any dict key contained in
            # this task's name. NOTE(review): there is no break, so the
            # last matching key wins, and taskProcVer can remain None when
            # nothing matches, making int(taskProcVer) below raise —
            # confirm intended.
            for taskname in procVer:
                if taskname in self.name():
                    taskProcVer = procVer[taskname]
    else:
        taskProcVer = procVer

    self.data.parameters.processingVersion = int(taskProcVer)
    # Recurse with the original dict so each child resolves its own value,
    # inheriting this task's resolved version as the fallback.
    for task in self.childTaskIterator():
        task.setProcessingVersion(procVer, taskProcVer, stepChainMap)
    return
|
|
1397
|
+
|
|
1398
|
+
def getProcessingVersion(self):
    """
    _getProcessingVersion_

    Return the task processing version (0 when never set).
    """
    return getattr(self.data.parameters, 'processingVersion', 0)
|
|
1405
|
+
|
|
1406
|
+
def setProcessingString(self, procString, parentProcessingString=None, stepChainMap=False):
    """
    _setProcessingString_

    Set the task processing string and propagate it to all child tasks.

    :param procString: string, or dict keyed by task name (TaskChain) /
        step name (StepChain, together with stepChainMap)
    :param parentProcessingString: fallback value inherited from the parent
    :param stepChainMap: StepChain step mapping, or False for other workflows
    """
    if isinstance(procString, dict) and stepChainMap:
        # StepChain: resolve through the step mapping and also push the
        # values down onto the individual WMSteps.
        taskProcString = self._getStepValue(procString, parentProcessingString)
        self._setStepProperty("ProcessingString", procString, stepChainMap)
    elif isinstance(procString, dict):
        taskProcString = procString.get(self.name(), parentProcessingString)
        if taskProcString is None:
            # Fallback (e.g. ACDC tasks): match any dict key contained in
            # this task's name. NOTE(review): no break — the last matching
            # key wins; taskProcString may stay None if nothing matches.
            for taskname in procString:
                if taskname in self.name():
                    taskProcString = procString[taskname]
    else:
        taskProcString = procString

    self.data.parameters.processingString = taskProcString

    # Recurse with the original dict so each child resolves its own value.
    for task in self.childTaskIterator():
        task.setProcessingString(procString, taskProcString, stepChainMap)
    return
|
|
1429
|
+
|
|
1430
|
+
def getProcessingString(self):
    """
    _getProcessingString_

    Return the task processing string, or None when never set.
    """
    return getattr(self.data.parameters, 'processingString', None)
|
|
1437
|
+
|
|
1438
|
+
def getCMSSWVersionsWithMergeTask(self):
    """
    _getCMSSWVersionsWithMergeTask_

    Collect the CMSSW releases used by this task's CMSSW steps plus those
    of its first-generation Merge child tasks (merge releases are expected
    to match the processing release); used for validation.

    :return: set of release name strings
    """
    versions = set()
    # releases of this task's own CMSSW steps
    for stepName in self.listAllStepNames():

        stepHelper = self.getStepHelper(stepName)
        if stepHelper.stepType() != "CMSSW":
            continue
        version = stepHelper.getCMSSWVersion()
        versions.add(version)

    # plus releases of direct Merge children (first generation only —
    # this loop does not recurse further down the tree)
    for task in self.childTaskIterator():
        if task.taskType() == "Merge":
            for stepName in task.listAllStepNames():

                stepHelper = task.getStepHelper(stepName)
                if stepHelper.stepType() != "CMSSW":
                    continue
                version = stepHelper.getCMSSWVersion()
                versions.add(version)

    return versions
|
|
1466
|
+
|
|
1467
|
+
def setNumberOfCores(self, cores, nStreams):
    """
    _setNumberOfCores_

    Set number of cores and event streams for each CMSSW step in this
    task and recursively in its children.

    :param cores: int, or dict keyed by task name (TaskChain style)
    :param nStreams: int, or dict keyed by task name; defaults to 0 when
        this task is missing from the dict
    """
    # auxiliary task types keep their default (StdBase) settings
    if self.taskType() in ["Merge", "Harvesting", "Cleanup", "LogCollect"]:
        return

    # resolve per-task values from TaskChain-style dicts
    if isinstance(cores, dict):
        taskCores = cores.get(self.name())
    else:
        taskCores = cores

    if isinstance(nStreams, dict):
        taskStreams = nStreams.get(self.name(), 0)
    else:
        taskStreams = nStreams

    if taskCores:
        for stepName in self.listAllStepNames():
            stepHelper = self.getStepHelper(stepName)
            if stepHelper.stepType() == "CMSSW":
                stepHelper.setNumberOfCores(taskCores, taskStreams)

    # recurse with the raw arguments so each child resolves its own value
    for task in self.childTaskIterator():
        task.setNumberOfCores(cores, nStreams)

    return
|
|
1496
|
+
|
|
1497
|
+
def getNumberOfCores(self):
    """
    Return the number of cores this task requires: the maximum over all
    of its steps, never less than 1.

    :return: int
    """
    coreCounts = [self.getStep(stepName).getNumberOfCores()
                  for stepName in self.listAllStepNames()]
    return max([1] + coreCounts)
|
|
1508
|
+
|
|
1509
|
+
def setTaskGPUSettings(self, requiresGPU, gpuParams):
    """
    Setter method for the GPU settings, applied to this Task object and
    all underneath CMSSW type step object.
    :param requiresGPU: string defining whether GPUs are needed. For TaskChains, it
        could be a dictionary key'ed by the taskname.
    :param gpuParams: GPU settings. A JSON encoded object, from either a None object
        or a dictionary. For TaskChains, it could be a dictionary key'ed by the taskname
    :return: nothing, the workload spec is updated in place.
    """
    # these job types shall not have these settings
    if self.taskType() in ["Merge", "Harvesting", "Cleanup", "LogCollect"]:
        return

    # default values come from StdBase
    if isinstance(requiresGPU, dict):
        thisTaskGPU = requiresGPU.get(self.name(), "forbidden")
    else:
        thisTaskGPU = requiresGPU

    # gpuParams arrives JSON-encoded; decode before use
    decodedGpuParams = json.loads(gpuParams)
    if self.name() in decodedGpuParams:
        # TaskChain-style: per-task GPU parameters
        thisTaskGPUParams = decodedGpuParams[self.name()]
    else:
        thisTaskGPUParams = decodedGpuParams

    # apply to every CMSSW step of this task
    for stepName in self.listAllStepNames():
        stepHelper = self.getStepHelper(stepName)
        if stepHelper.stepType() == "CMSSW":
            stepHelper.setGPUSettings(thisTaskGPU, thisTaskGPUParams)

    # recurse into children with the raw (undecoded) arguments
    for task in self.childTaskIterator():
        task.setTaskGPUSettings(requiresGPU, gpuParams)

    return
|
|
1544
|
+
|
|
1545
|
+
def getRequiresGPU(self):
    """
    Return whether this task should use GPUs. For multi-step tasks the
    strongest requirement wins: "required" > "optional" > "forbidden".

    :return: a string (default "forbidden")
    """
    found = {"forbidden"}
    for stepName in self.listAllStepNames():
        helper = self.getStep(stepName)
        if helper.stepType() == "CMSSW" and helper.getGPURequired():
            found.add(helper.getGPURequired())

    # decide which collected value carries the highest weight
    if len(found) == 1:
        return found.pop()
    for level in ("required", "optional"):
        if level in found:
            return level
    return "forbidden"
|
|
1569
|
+
|
|
1570
|
+
def getGPURequirements(self):
    """
    Return the GPU requirements of the first (alphabetically ordered)
    CMSSW step that defines a non-empty requirements dict.

    :return: dict (empty when no step defines GPU requirements)
    """
    for stepName in sorted(self.listAllStepNames()):
        helper = self.getStep(stepName)
        if helper.stepType() == "CMSSW" and helper.getGPURequirements():
            return helper.getGPURequirements()
    return {}
|
|
1583
|
+
|
|
1584
|
+
def setJobExtraMatchRequirements(self, extraRequirements):
    """
    Store the extra matchmaking requirements for this task's jobs.

    :param extraRequirements: string with the extra match requirements
    :return: nothing, the workload spec is updated in place.
    """
    # NOTE: deliberately NOT propagated to child tasks.
    self.data.constraints.jobExtraMatchRequirements = extraRequirements
|
|
1593
|
+
|
|
1594
|
+
def getJobExtraMatchRequirements(self):
    """
    Return the extra matchmaking requirements string for this task's
    jobs (empty string when unset).
    """
    return getattr(self.data.constraints, 'jobExtraMatchRequirements', "")
|
|
1600
|
+
|
|
1601
|
+
def _getStepValue(self, keyDict, defaultValue):
|
|
1602
|
+
"""
|
|
1603
|
+
__getStepValue_
|
|
1604
|
+
|
|
1605
|
+
Maps this taskName - in somehow a hacky way - to a 'StepName' value
|
|
1606
|
+
that should exist in a StepChain request. Used only on tasks that have
|
|
1607
|
+
output module
|
|
1608
|
+
:param keyDict: a dict with either AcqEra/ProcStr/ProcVer key/value pairs,
|
|
1609
|
+
where the key corresponds to the StepName
|
|
1610
|
+
"""
|
|
1611
|
+
if self.taskType() == "Merge":
|
|
1612
|
+
extractedTaskName = self.name().split("Merge")[0]
|
|
1613
|
+
value = keyDict.get(extractedTaskName)
|
|
1614
|
+
elif self.taskType() in ["Production", "Processing"]:
|
|
1615
|
+
value = keyDict.get(self.name())
|
|
1616
|
+
else:
|
|
1617
|
+
value = defaultValue
|
|
1618
|
+
|
|
1619
|
+
return value
|
|
1620
|
+
|
|
1621
|
+
def _setStepProperty(self, propertyName, propertyDict, stepMap):
|
|
1622
|
+
"""
|
|
1623
|
+
For StepChain workloads, we also need to set AcqEra/ProcStr/ProcVer
|
|
1624
|
+
at the WMStep level, such that we can properly map different cmsRun
|
|
1625
|
+
steps - within the same task - to different meta data information.
|
|
1626
|
+
:param propertyName: the name of the property to set at step level
|
|
1627
|
+
:param propertyDict: a dictionary mapping StepName to its value
|
|
1628
|
+
:param stepMap: map between step name, step number and cmsRun number,
|
|
1629
|
+
same as returned from the workload getStepMapping
|
|
1630
|
+
"""
|
|
1631
|
+
propMethodMap = {"AcquisitionEra": "setAcqEra",
|
|
1632
|
+
"ProcessingString": "setProcStr",
|
|
1633
|
+
"ProcessingVersion": "setProcStr"}
|
|
1634
|
+
|
|
1635
|
+
if self.taskType() not in ["Production", "Processing"]:
|
|
1636
|
+
# then there is no need to set anything, single cmsRun step at most
|
|
1637
|
+
return
|
|
1638
|
+
|
|
1639
|
+
for stepName, stepValues in viewitems(stepMap):
|
|
1640
|
+
cmsRunNum = stepValues[1]
|
|
1641
|
+
stepHelper = self.getStepHelper(cmsRunNum)
|
|
1642
|
+
callableMethod = getattr(stepHelper, propMethodMap[propertyName])
|
|
1643
|
+
callableMethod(propertyDict[stepName])
|
|
1644
|
+
|
|
1645
|
+
def setAcquisitionEra(self, era, parentAcquisitionEra=None, stepChainMap=False):
    """
    _setAcquistionEra_

    Set the task acquisition era and propagate it to all child tasks.

    :param era: string, or dict keyed by task name (TaskChain) / step name
        (StepChain, together with stepChainMap)
    :param parentAcquisitionEra: fallback inherited from the parent task
    :param stepChainMap: StepChain step mapping, or False otherwise
    """

    if isinstance(era, dict) and stepChainMap:
        # StepChain: resolve through the step mapping and also push the
        # values down onto the individual WMSteps.
        taskEra = self._getStepValue(era, parentAcquisitionEra)
        self._setStepProperty("AcquisitionEra", era, stepChainMap)
    elif isinstance(era, dict):
        taskEra = era.get(self.name(), parentAcquisitionEra)
        if taskEra is None:
            # We cannot properly set AcqEra for ACDC of TaskChain Merge
            # failures, so we should look up for a similar taskname in
            # the acqera dict passed from the requestor
            for taskname in era:
                if taskname in self.name():
                    taskEra = era[taskname]
    else:
        taskEra = era

    self.data.parameters.acquisitionEra = taskEra

    # Recurse with the original dict; this task's resolved era becomes
    # the children's fallback.
    for task in self.childTaskIterator():
        task.setAcquisitionEra(era, taskEra, stepChainMap)
    return
|
|
1672
|
+
|
|
1673
|
+
def getAcquisitionEra(self):
    """
    _getAcquisitionEra_

    Return the task acquisition era, or None when unset.
    """
    return getattr(self.data.parameters, 'acquisitionEra', None)
|
|
1680
|
+
|
|
1681
|
+
def setCampaignName(self, campaign):
    """
    Define the campaign this task belongs to.

    :param campaign: str, name of the campaign to be defined
    """
    self.data.campaignName = campaign
|
|
1687
|
+
|
|
1688
|
+
def getCampaignName(self):
    """
    Return this task's campaign name.

    :return: str, or None when unset
    """
    return getattr(self.data, 'campaignName', None)
|
|
1694
|
+
|
|
1695
|
+
def setLumiMask(self, lumiMask=None, override=True):
    """
    Attach the given LumiMask to the task
    At this point the lumi mask is just the compactList dict not the LumiList object

    :param lumiMask: compact-list dict mapping run number -> list of
        [firstLumi, lastLumi] ranges; falsy values are a no-op
    :param override: when False, keep any runs/lumis already on the task
    """

    if not lumiMask:
        return

    runs = getattr(self.data.input.splitting, 'runs', None)
    lumis = getattr(self.data.input.splitting, 'lumis', None)
    if not override and runs and lumis:  # Unless instructed, don't overwrite runs and lumis which may be there from a task already
        return

    # Flatten the compact list: parallel lists of run numbers and
    # comma-joined lumi boundary strings (the WMAgent splitting format).
    runs = []
    lumis = []
    for run, runLumis in viewitems(lumiMask):
        runs.append(int(run))
        lumiList = []
        for lumi in runLumis:
            lumiList.extend([str(l) for l in lumi])
        lumis.append(','.join(lumiList))

    self.data.input.splitting.runs = runs
    self.data.input.splitting.lumis = lumis

    # Apply the same mask throughout the task subtree.
    for task in self.childTaskIterator():
        task.setLumiMask(lumiMask, override)

    return
|
|
1725
|
+
|
|
1726
|
+
def getLumiMask(self):
    """
    Return the task lumi mask as a LumiList built from the stored
    splitting runs/lumis, or an empty dict when no mask is set.
    """
    splitting = self.data.input.splitting
    runs = getattr(splitting, 'runs', None)
    lumis = getattr(splitting, 'lumis', None)
    if runs and lumis:
        return LumiList(wmagentFormat=(runs, lumis))
    return {}
|
|
1736
|
+
|
|
1737
|
+
def _propMethodMap(self):
|
|
1738
|
+
"""
|
|
1739
|
+
internal mapping methop which maps which method need to be call for each
|
|
1740
|
+
property.
|
|
1741
|
+
For now only contains properties which updates in assignment stage.
|
|
1742
|
+
"""
|
|
1743
|
+
propMap = {"ProcessingVersion": self.setProcessingVersion,
|
|
1744
|
+
"AcquisitionEra": self.setAcquisitionEra,
|
|
1745
|
+
"ProcessingString": self.setProcessingString
|
|
1746
|
+
}
|
|
1747
|
+
return propMap
|
|
1748
|
+
|
|
1749
|
+
def setProperties(self, properties):
    """
    Apply the given task properties (assignment-stage parameters); each
    key is dispatched through _propMethodMap to its setter.
    """
    for propName, propValue in viewitems(properties):
        self._propMethodMap()[propName](propValue)
|
|
1755
|
+
|
|
1756
|
+
def deleteChild(self, childName):
    """
    _deleteChild_

    Drop the named child task from the task tree, if present.
    """
    self.deleteNode(childName)
|
|
1763
|
+
|
|
1764
|
+
def setPrepID(self, prepID):
    """
    _setPrepID_

    Fill in this task's prepID when currently unset, then push the
    effective prepID down to every child task.
    """
    # an existing prepID always wins; only set when missing
    if prepID and not self.getPrepID():
        self.data.prepID = prepID

    effectiveID = self.getPrepID()
    if effectiveID:
        for task in self.childTaskIterator():
            task.setPrepID(effectiveID)
|
|
1779
|
+
|
|
1780
|
+
def getPrepID(self):
    """
    _getPrepID_

    Return the workflow prepID, or None when unset.
    """
    return getattr(self.data, 'prepID', None)
|
|
1787
|
+
|
|
1788
|
+
def setLFNBase(self, mergedLFNBase, unmergedLFNBase):
    """
    _setLFNBase_

    Set the merged and unmerged base LFNs on this task and, recursively,
    on every child task.
    """
    self.data.mergedLFNBase = mergedLFNBase
    self.data.unmergedLFNBase = unmergedLFNBase
    for child in self.childTaskIterator():
        child.setLFNBase(mergedLFNBase, unmergedLFNBase)
|
|
1800
|
+
|
|
1801
|
+
def _getLFNBase(self):
|
|
1802
|
+
"""
|
|
1803
|
+
private method getting lfn base.
|
|
1804
|
+
lfn base should be set by workflow
|
|
1805
|
+
"""
|
|
1806
|
+
return (getattr(self.data, 'mergedLFNBase', "/store/data"),
|
|
1807
|
+
getattr(self.data, 'unmergedLFNBase', "/store/unmerged"))
|
|
1808
|
+
|
|
1809
|
+
def _getKeyValue(self, keyname, stepname, values):
    # Pull *keyname* out of the assignment arguments *values*: a plain
    # string applies to every step, while a dict is keyed by step name.
    # NOTE(review): returns None when keyname is absent, when the dict
    # has no entry for stepname, or when the value is neither text nor
    # dict — callers treat a falsy result as "use the task-level value".
    if keyname not in values:
        return
    elif isinstance(values[keyname], (newstr, bytes)):
        # newstr presumably comes from the future/past py2-py3 compat
        # layer imported at module level — TODO confirm
        return values[keyname]
    elif isinstance(values[keyname], dict):
        return values[keyname].get(stepname)
|
|
1816
|
+
|
|
1817
|
+
def _updateLFNsStepChain(self, stepName, dictValues, stepMapping):
    """
    __updateLFNsStepChain_

    Helper function needed for a proper StepChain LFN/ProcessedDataset handling

    :param stepName: is the cmsRun name (cmsRun1, cmsRun2, ...)
    :param dictValues: part of the arguments provided during assignment
    :param stepMapping: built during StepChain creation
    :return: a single string for each of those 3 properties
    """
    # Reverse-map the cmsRun name back to the request-level StepName.
    reqStepName = None
    for reqStep, values in viewitems(stepMapping):
        if stepName == values[1]:
            reqStepName = reqStep
    if not reqStepName:
        # I have no idea which cmsRun is that...
        return None, None, None

    # For each property, prefer the step-specific assignment value and
    # fall back to the task-level setting when absent/falsy.
    era = self._getKeyValue('AcquisitionEra', reqStepName, dictValues)
    if not era:
        era = self.getAcquisitionEra()
    procstr = self._getKeyValue('ProcessingString', reqStepName, dictValues)
    if not procstr:
        procstr = self.getProcessingString()
    procver = self._getKeyValue('ProcessingVersion', reqStepName, dictValues)
    if not procver:
        procver = self.getProcessingVersion()

    return era, procstr, procver
|
|
1847
|
+
|
|
1848
|
+
def updateLFNsAndDatasets(self, runNumber=None, dictValues=None, stepMapping=None):
    """
    _updateLFNsAndDatasets_

    Update all the output LFNs and data names for all tasks in the workflow.
    This needs to be called after updating the acquisition era, processing
    version or merged/unmerged lfn base.

    :param runNumber: optional run number appended to the LFNs as a
        /NNN/NNN/NNN suffix
    :param dictValues: StepChain assignment arguments (with stepMapping)
    :param stepMapping: StepChain step mapping (with dictValues)
    """
    mergedLFNBase, unmergedLFNBase = self._getLFNBase()
    taskType = self.taskType()

    for stepName in self.listAllStepNames():
        stepHelper = self.getStepHelper(stepName)

        if stepHelper.stepType() == "CMSSW":
            if dictValues and stepMapping:
                # StepChain: resolve per-cmsRun era/string/version
                acqera, procstr, procver = self._updateLFNsStepChain(stepName, dictValues, stepMapping)
            else:
                acqera = self.getAcquisitionEra()
                procstr = self.getProcessingString()
                procver = self.getProcessingVersion()

            for outputModuleName in stepHelper.listOutputModules():
                outputModule = stepHelper.getOutputModule(outputModuleName)
                filterName = getattr(outputModule, "filterName", None)

                # Build "<procstr>-v<N>" (or just "v<N>"), optionally
                # prefixed by the output module's filter name.
                if procstr:
                    processingEra = "%s-v%i" % (procstr, procver)
                else:
                    processingEra = "v%i" % procver
                if filterName:
                    processedDataset = "%s-%s-%s" % (acqera, filterName, processingEra)
                    processingString = "%s-%s" % (filterName, processingEra)
                else:
                    processedDataset = "%s-%s" % (acqera, processingEra)
                    processingString = processingEra

                unmergedLFN = "%s/%s/%s/%s/%s" % (unmergedLFNBase,
                                                  acqera,
                                                  getattr(outputModule, "primaryDataset"),
                                                  getattr(outputModule, "dataTier"),
                                                  processingString)
                mergedLFN = "%s/%s/%s/%s/%s" % (mergedLFNBase,
                                                acqera,
                                                getattr(outputModule, "primaryDataset"),
                                                getattr(outputModule, "dataTier"),
                                                processingString)

                # Optional run-number suffix, zero-padded to 9 digits and
                # split into 3-digit directory levels.
                if runNumber is not None and runNumber > 0:
                    runString = str(runNumber).zfill(9)
                    lfnSuffix = "/%s/%s/%s" % (runString[0:3],
                                               runString[3:6],
                                               runString[6:9])
                    unmergedLFN += lfnSuffix
                    mergedLFN += lfnSuffix

                # lfnBase() validates the constructed paths (module-level
                # helper); result is discarded on purpose.
                lfnBase(unmergedLFN)
                lfnBase(mergedLFN)
                setattr(outputModule, "processedDataset", processedDataset)

                # For merge tasks, we want all output to go to the merged LFN base.
                if taskType == "Merge":
                    setattr(outputModule, "lfnBase", mergedLFN)
                    setattr(outputModule, "mergedLFNBase", mergedLFN)

                    # DQM output: also refresh the dataset name used by
                    # harvesting children.
                    if getattr(outputModule, "dataTier") in ["DQM", "DQMIO"]:
                        datasetName = "/%s/%s/%s" % (getattr(outputModule, "primaryDataset"),
                                                     processedDataset,
                                                     getattr(outputModule, "dataTier"))
                        self.updateDatasetName(datasetName)
                else:
                    setattr(outputModule, "lfnBase", unmergedLFN)
                    setattr(outputModule, "mergedLFNBase", mergedLFN)

    self.setTaskLogBaseLFN(unmergedLFNBase)

    # do the samething for all the child
    for task in self.childTaskIterator():
        task.updateLFNsAndDatasets(runNumber=runNumber)

    return
|
|
1930
|
+
|
|
1931
|
+
def updateDatasetName(self, datasetName):
    """
    _updateDatasetName_

    Propagate *datasetName* to the CMSSW steps of every Harvesting child
    task of this (merge) task.
    """
    harvestChildren = (t for t in self.childTaskIterator()
                       if t.taskType() == "Harvesting")
    for child in harvestChildren:
        for stepName in child.listAllStepNames():
            stepHelper = child.getStepHelper(stepName)
            if stepHelper.stepType() == "CMSSW":
                stepHelper.getTypeHelper().setDatasetName(datasetName)
    return
|
|
1948
|
+
|
|
1949
|
+
|
|
1950
|
+
class WMTask(ConfigSectionTree):
    """
    _WMTask_

    workload management task.
    Allow a set of processing job specifications that are interdependent
    to be modelled as a tree structure.

    """

    def __init__(self, name):
        ConfigSectionTree.__init__(self, name)
        # basic identity/bookkeeping attributes
        self.objectType = self.__class__.__name__
        self.pathName = None
        self.taskType = None
        self.prepID = None
        # steps subtree; topStepName anchors the first step of this task
        self.section_("steps")
        self.steps.topStepName = None
        # standard config sections every task carries
        self.section_("parameters")
        self.section_("pythonLibs")
        self.section_("constraints")
        self.section_("input")
        self.section_("notifications")
        self.section_("subscriptions")
        self.section_("environment")
        self.notifications.targets = []
        # input definition: sandbox plus job-splitting configuration
        self.input.sandbox = None
        self.input.section_("splitting")
        self.input.splitting.algorithm = None
        self.input.splitting.section_("performance")
        # site constraints: white/black lists and trust flags for
        # site lists (primary and pileup)
        self.constraints.section_("sites")
        self.constraints.sites.whitelist = []
        self.constraints.sites.blacklist = []
        self.constraints.sites.trustlists = False
        self.constraints.sites.trustPUlists = False
        self.subscriptions.outputSubs = []
        # WMBS-related input bookkeeping
        self.input.section_("WMBS")
|
1988
|
+
|
|
1989
|
+
def makeWMTask(taskName):
    """
    _makeWMTask_

    Convenience factory: build a fresh WMTask named *taskName* and hand
    it back wrapped in a WMTaskHelper.
    """
    task = WMTask(taskName)
    return WMTaskHelper(task)
|