wmglobalqueue 2.4.5.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- Utils/CPMetrics.py +270 -0
- Utils/CertTools.py +100 -0
- Utils/EmailAlert.py +50 -0
- Utils/ExtendedUnitTestCase.py +62 -0
- Utils/FileTools.py +182 -0
- Utils/IteratorTools.py +80 -0
- Utils/MathUtils.py +31 -0
- Utils/MemoryCache.py +119 -0
- Utils/Patterns.py +24 -0
- Utils/Pipeline.py +137 -0
- Utils/PortForward.py +97 -0
- Utils/ProcFS.py +112 -0
- Utils/ProcessStats.py +194 -0
- Utils/PythonVersion.py +17 -0
- Utils/Signals.py +36 -0
- Utils/TemporaryEnvironment.py +27 -0
- Utils/Throttled.py +227 -0
- Utils/Timers.py +130 -0
- Utils/Timestamps.py +86 -0
- Utils/TokenManager.py +143 -0
- Utils/Tracing.py +60 -0
- Utils/TwPrint.py +98 -0
- Utils/Utilities.py +318 -0
- Utils/__init__.py +11 -0
- Utils/wmcoreDTools.py +707 -0
- WMCore/ACDC/Collection.py +57 -0
- WMCore/ACDC/CollectionTypes.py +12 -0
- WMCore/ACDC/CouchCollection.py +67 -0
- WMCore/ACDC/CouchFileset.py +238 -0
- WMCore/ACDC/CouchService.py +73 -0
- WMCore/ACDC/DataCollectionService.py +485 -0
- WMCore/ACDC/Fileset.py +94 -0
- WMCore/ACDC/__init__.py +11 -0
- WMCore/Algorithms/Alarm.py +39 -0
- WMCore/Algorithms/MathAlgos.py +274 -0
- WMCore/Algorithms/MiscAlgos.py +67 -0
- WMCore/Algorithms/ParseXMLFile.py +115 -0
- WMCore/Algorithms/Permissions.py +27 -0
- WMCore/Algorithms/Singleton.py +58 -0
- WMCore/Algorithms/SubprocessAlgos.py +129 -0
- WMCore/Algorithms/__init__.py +7 -0
- WMCore/Cache/GenericDataCache.py +98 -0
- WMCore/Cache/WMConfigCache.py +572 -0
- WMCore/Cache/__init__.py +0 -0
- WMCore/Configuration.py +659 -0
- WMCore/DAOFactory.py +47 -0
- WMCore/DataStructs/File.py +177 -0
- WMCore/DataStructs/Fileset.py +140 -0
- WMCore/DataStructs/Job.py +182 -0
- WMCore/DataStructs/JobGroup.py +142 -0
- WMCore/DataStructs/JobPackage.py +49 -0
- WMCore/DataStructs/LumiList.py +734 -0
- WMCore/DataStructs/Mask.py +219 -0
- WMCore/DataStructs/MathStructs/ContinuousSummaryHistogram.py +197 -0
- WMCore/DataStructs/MathStructs/DiscreteSummaryHistogram.py +92 -0
- WMCore/DataStructs/MathStructs/SummaryHistogram.py +117 -0
- WMCore/DataStructs/MathStructs/__init__.py +0 -0
- WMCore/DataStructs/Pickleable.py +24 -0
- WMCore/DataStructs/Run.py +256 -0
- WMCore/DataStructs/Subscription.py +175 -0
- WMCore/DataStructs/WMObject.py +47 -0
- WMCore/DataStructs/WorkUnit.py +112 -0
- WMCore/DataStructs/Workflow.py +60 -0
- WMCore/DataStructs/__init__.py +8 -0
- WMCore/Database/CMSCouch.py +1430 -0
- WMCore/Database/ConfigDBMap.py +29 -0
- WMCore/Database/CouchMonitoring.py +450 -0
- WMCore/Database/CouchUtils.py +118 -0
- WMCore/Database/DBCore.py +198 -0
- WMCore/Database/DBCreator.py +113 -0
- WMCore/Database/DBExceptionHandler.py +59 -0
- WMCore/Database/DBFactory.py +117 -0
- WMCore/Database/DBFormatter.py +177 -0
- WMCore/Database/Dialects.py +13 -0
- WMCore/Database/ExecuteDAO.py +327 -0
- WMCore/Database/MongoDB.py +241 -0
- WMCore/Database/MySQL/Destroy.py +42 -0
- WMCore/Database/MySQL/ListUserContent.py +20 -0
- WMCore/Database/MySQL/__init__.py +9 -0
- WMCore/Database/MySQLCore.py +132 -0
- WMCore/Database/Oracle/Destroy.py +56 -0
- WMCore/Database/Oracle/ListUserContent.py +19 -0
- WMCore/Database/Oracle/__init__.py +9 -0
- WMCore/Database/ResultSet.py +44 -0
- WMCore/Database/Transaction.py +91 -0
- WMCore/Database/__init__.py +9 -0
- WMCore/Database/ipy_profile_couch.py +438 -0
- WMCore/GlobalWorkQueue/CherryPyThreads/CleanUpTask.py +29 -0
- WMCore/GlobalWorkQueue/CherryPyThreads/HeartbeatMonitor.py +105 -0
- WMCore/GlobalWorkQueue/CherryPyThreads/LocationUpdateTask.py +28 -0
- WMCore/GlobalWorkQueue/CherryPyThreads/ReqMgrInteractionTask.py +35 -0
- WMCore/GlobalWorkQueue/CherryPyThreads/__init__.py +0 -0
- WMCore/GlobalWorkQueue/__init__.py +0 -0
- WMCore/GroupUser/CouchObject.py +127 -0
- WMCore/GroupUser/Decorators.py +51 -0
- WMCore/GroupUser/Group.py +33 -0
- WMCore/GroupUser/Interface.py +73 -0
- WMCore/GroupUser/User.py +96 -0
- WMCore/GroupUser/__init__.py +11 -0
- WMCore/Lexicon.py +836 -0
- WMCore/REST/Auth.py +202 -0
- WMCore/REST/CherryPyPeriodicTask.py +166 -0
- WMCore/REST/Error.py +333 -0
- WMCore/REST/Format.py +642 -0
- WMCore/REST/HeartbeatMonitorBase.py +90 -0
- WMCore/REST/Main.py +636 -0
- WMCore/REST/Server.py +2435 -0
- WMCore/REST/Services.py +24 -0
- WMCore/REST/Test.py +120 -0
- WMCore/REST/Tools.py +38 -0
- WMCore/REST/Validation.py +250 -0
- WMCore/REST/__init__.py +1 -0
- WMCore/ReqMgr/DataStructs/RequestStatus.py +209 -0
- WMCore/ReqMgr/DataStructs/RequestType.py +13 -0
- WMCore/ReqMgr/DataStructs/__init__.py +0 -0
- WMCore/ReqMgr/__init__.py +1 -0
- WMCore/Services/AlertManager/AlertManagerAPI.py +111 -0
- WMCore/Services/AlertManager/__init__.py +0 -0
- WMCore/Services/CRIC/CRIC.py +238 -0
- WMCore/Services/CRIC/__init__.py +0 -0
- WMCore/Services/DBS/DBS3Reader.py +1044 -0
- WMCore/Services/DBS/DBSConcurrency.py +44 -0
- WMCore/Services/DBS/DBSErrors.py +112 -0
- WMCore/Services/DBS/DBSReader.py +23 -0
- WMCore/Services/DBS/DBSUtils.py +166 -0
- WMCore/Services/DBS/DBSWriterObjects.py +381 -0
- WMCore/Services/DBS/ProdException.py +133 -0
- WMCore/Services/DBS/__init__.py +8 -0
- WMCore/Services/FWJRDB/FWJRDBAPI.py +118 -0
- WMCore/Services/FWJRDB/__init__.py +0 -0
- WMCore/Services/HTTPS/HTTPSAuthHandler.py +66 -0
- WMCore/Services/HTTPS/__init__.py +0 -0
- WMCore/Services/LogDB/LogDB.py +201 -0
- WMCore/Services/LogDB/LogDBBackend.py +191 -0
- WMCore/Services/LogDB/LogDBExceptions.py +11 -0
- WMCore/Services/LogDB/LogDBReport.py +85 -0
- WMCore/Services/LogDB/__init__.py +0 -0
- WMCore/Services/MSPileup/__init__.py +0 -0
- WMCore/Services/MSUtils/MSUtils.py +54 -0
- WMCore/Services/MSUtils/__init__.py +0 -0
- WMCore/Services/McM/McM.py +173 -0
- WMCore/Services/McM/__init__.py +8 -0
- WMCore/Services/MonIT/Grafana.py +133 -0
- WMCore/Services/MonIT/__init__.py +0 -0
- WMCore/Services/PyCondor/PyCondorAPI.py +154 -0
- WMCore/Services/PyCondor/__init__.py +0 -0
- WMCore/Services/ReqMgr/ReqMgr.py +261 -0
- WMCore/Services/ReqMgr/__init__.py +0 -0
- WMCore/Services/ReqMgrAux/ReqMgrAux.py +419 -0
- WMCore/Services/ReqMgrAux/__init__.py +0 -0
- WMCore/Services/RequestDB/RequestDBReader.py +267 -0
- WMCore/Services/RequestDB/RequestDBWriter.py +39 -0
- WMCore/Services/RequestDB/__init__.py +0 -0
- WMCore/Services/Requests.py +624 -0
- WMCore/Services/Rucio/Rucio.py +1290 -0
- WMCore/Services/Rucio/RucioUtils.py +74 -0
- WMCore/Services/Rucio/__init__.py +0 -0
- WMCore/Services/RucioConMon/RucioConMon.py +121 -0
- WMCore/Services/RucioConMon/__init__.py +0 -0
- WMCore/Services/Service.py +400 -0
- WMCore/Services/StompAMQ/__init__.py +0 -0
- WMCore/Services/TagCollector/TagCollector.py +155 -0
- WMCore/Services/TagCollector/XMLUtils.py +98 -0
- WMCore/Services/TagCollector/__init__.py +0 -0
- WMCore/Services/UUIDLib.py +13 -0
- WMCore/Services/UserFileCache/UserFileCache.py +160 -0
- WMCore/Services/UserFileCache/__init__.py +8 -0
- WMCore/Services/WMAgent/WMAgent.py +63 -0
- WMCore/Services/WMAgent/__init__.py +0 -0
- WMCore/Services/WMArchive/CMSSWMetrics.py +526 -0
- WMCore/Services/WMArchive/DataMap.py +463 -0
- WMCore/Services/WMArchive/WMArchive.py +33 -0
- WMCore/Services/WMArchive/__init__.py +0 -0
- WMCore/Services/WMBS/WMBS.py +97 -0
- WMCore/Services/WMBS/__init__.py +0 -0
- WMCore/Services/WMStats/DataStruct/RequestInfoCollection.py +300 -0
- WMCore/Services/WMStats/DataStruct/__init__.py +0 -0
- WMCore/Services/WMStats/WMStatsPycurl.py +145 -0
- WMCore/Services/WMStats/WMStatsReader.py +445 -0
- WMCore/Services/WMStats/WMStatsWriter.py +273 -0
- WMCore/Services/WMStats/__init__.py +0 -0
- WMCore/Services/WMStatsServer/WMStatsServer.py +134 -0
- WMCore/Services/WMStatsServer/__init__.py +0 -0
- WMCore/Services/WorkQueue/WorkQueue.py +492 -0
- WMCore/Services/WorkQueue/__init__.py +0 -0
- WMCore/Services/__init__.py +8 -0
- WMCore/Services/pycurl_manager.py +574 -0
- WMCore/WMBase.py +50 -0
- WMCore/WMConnectionBase.py +164 -0
- WMCore/WMException.py +183 -0
- WMCore/WMExceptions.py +269 -0
- WMCore/WMFactory.py +76 -0
- WMCore/WMInit.py +377 -0
- WMCore/WMLogging.py +104 -0
- WMCore/WMSpec/ConfigSectionTree.py +442 -0
- WMCore/WMSpec/Persistency.py +135 -0
- WMCore/WMSpec/Steps/BuildMaster.py +87 -0
- WMCore/WMSpec/Steps/BuildTools.py +201 -0
- WMCore/WMSpec/Steps/Builder.py +97 -0
- WMCore/WMSpec/Steps/Diagnostic.py +89 -0
- WMCore/WMSpec/Steps/Emulator.py +62 -0
- WMCore/WMSpec/Steps/ExecuteMaster.py +208 -0
- WMCore/WMSpec/Steps/Executor.py +210 -0
- WMCore/WMSpec/Steps/StepFactory.py +213 -0
- WMCore/WMSpec/Steps/TaskEmulator.py +75 -0
- WMCore/WMSpec/Steps/Template.py +204 -0
- WMCore/WMSpec/Steps/Templates/AlcaHarvest.py +76 -0
- WMCore/WMSpec/Steps/Templates/CMSSW.py +613 -0
- WMCore/WMSpec/Steps/Templates/DQMUpload.py +59 -0
- WMCore/WMSpec/Steps/Templates/DeleteFiles.py +70 -0
- WMCore/WMSpec/Steps/Templates/LogArchive.py +84 -0
- WMCore/WMSpec/Steps/Templates/LogCollect.py +105 -0
- WMCore/WMSpec/Steps/Templates/StageOut.py +105 -0
- WMCore/WMSpec/Steps/Templates/__init__.py +10 -0
- WMCore/WMSpec/Steps/WMExecutionFailure.py +21 -0
- WMCore/WMSpec/Steps/__init__.py +8 -0
- WMCore/WMSpec/Utilities.py +63 -0
- WMCore/WMSpec/WMSpecErrors.py +12 -0
- WMCore/WMSpec/WMStep.py +347 -0
- WMCore/WMSpec/WMTask.py +1997 -0
- WMCore/WMSpec/WMWorkload.py +2288 -0
- WMCore/WMSpec/WMWorkloadTools.py +382 -0
- WMCore/WMSpec/__init__.py +9 -0
- WMCore/WorkQueue/DataLocationMapper.py +273 -0
- WMCore/WorkQueue/DataStructs/ACDCBlock.py +47 -0
- WMCore/WorkQueue/DataStructs/Block.py +48 -0
- WMCore/WorkQueue/DataStructs/CouchWorkQueueElement.py +148 -0
- WMCore/WorkQueue/DataStructs/WorkQueueElement.py +274 -0
- WMCore/WorkQueue/DataStructs/WorkQueueElementResult.py +152 -0
- WMCore/WorkQueue/DataStructs/WorkQueueElementsSummary.py +185 -0
- WMCore/WorkQueue/DataStructs/__init__.py +0 -0
- WMCore/WorkQueue/Policy/End/EndPolicyInterface.py +44 -0
- WMCore/WorkQueue/Policy/End/SingleShot.py +22 -0
- WMCore/WorkQueue/Policy/End/__init__.py +32 -0
- WMCore/WorkQueue/Policy/PolicyInterface.py +17 -0
- WMCore/WorkQueue/Policy/Start/Block.py +258 -0
- WMCore/WorkQueue/Policy/Start/Dataset.py +180 -0
- WMCore/WorkQueue/Policy/Start/MonteCarlo.py +131 -0
- WMCore/WorkQueue/Policy/Start/ResubmitBlock.py +171 -0
- WMCore/WorkQueue/Policy/Start/StartPolicyInterface.py +316 -0
- WMCore/WorkQueue/Policy/Start/__init__.py +34 -0
- WMCore/WorkQueue/Policy/__init__.py +57 -0
- WMCore/WorkQueue/WMBSHelper.py +772 -0
- WMCore/WorkQueue/WorkQueue.py +1237 -0
- WMCore/WorkQueue/WorkQueueBackend.py +750 -0
- WMCore/WorkQueue/WorkQueueBase.py +39 -0
- WMCore/WorkQueue/WorkQueueExceptions.py +44 -0
- WMCore/WorkQueue/WorkQueueReqMgrInterface.py +278 -0
- WMCore/WorkQueue/WorkQueueUtils.py +130 -0
- WMCore/WorkQueue/__init__.py +13 -0
- WMCore/Wrappers/JsonWrapper/JSONThunker.py +342 -0
- WMCore/Wrappers/JsonWrapper/__init__.py +7 -0
- WMCore/Wrappers/__init__.py +6 -0
- WMCore/__init__.py +10 -0
- wmglobalqueue-2.4.5.1.data/data/bin/wmc-dist-patch +15 -0
- wmglobalqueue-2.4.5.1.data/data/bin/wmc-dist-unpatch +8 -0
- wmglobalqueue-2.4.5.1.data/data/bin/wmc-httpd +3 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/.couchapprc +1 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/README.md +40 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/_attachments/index.html +264 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/_attachments/js/ElementInfoByWorkflow.js +96 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/_attachments/js/StuckElementInfo.js +57 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/_attachments/js/WorkloadInfoTable.js +80 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/_attachments/js/dataTable.js +70 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/_attachments/js/namespace.js +23 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/_attachments/style/main.css +75 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/couchapp.json +4 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/filters/childQueueFilter.js +13 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/filters/filterDeletedDocs.js +3 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/filters/queueFilter.js +11 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/language +1 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/lib/mustache.js +333 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/lib/validate.js +27 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/lib/workqueue_utils.js +61 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/lists/elementsDetail.js +28 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/lists/filter.js +86 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/lists/stuckElements.js +38 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/lists/workRestrictions.js +153 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/lists/workflowSummary.js +28 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/rewrites.json +73 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/shows/redirect.js +23 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/shows/status.js +40 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/templates/ElementSummaryByWorkflow.html +27 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/templates/StuckElementSummary.html +26 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/templates/TaskStatus.html +23 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/templates/WorkflowSummary.html +27 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/templates/partials/workqueue-common-lib.html +2 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/templates/partials/yui-lib-remote.html +16 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/templates/partials/yui-lib.html +18 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/updates/in-place.js +50 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/validate_doc_update.js +8 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/vendor/couchapp/_attachments/jquery.couch.app.js +235 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/vendor/couchapp/_attachments/jquery.pathbinder.js +173 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/activeData/map.js +8 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/activeData/reduce.js +2 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/activeParentData/map.js +8 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/activeParentData/reduce.js +2 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/activePileupData/map.js +8 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/activePileupData/reduce.js +2 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/analyticsData/map.js +11 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/analyticsData/reduce.js +1 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/availableByPriority/map.js +6 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/conflicts/map.js +5 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/elements/map.js +5 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/elementsByData/map.js +8 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/elementsByParent/map.js +8 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/elementsByParentData/map.js +8 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/elementsByPileupData/map.js +8 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/elementsByStatus/map.js +8 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/elementsBySubscription/map.js +6 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/elementsByWorkflow/map.js +8 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/elementsByWorkflow/reduce.js +3 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/elementsDetailByWorkflowAndStatus/map.js +26 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/jobInjectStatusByRequest/map.js +10 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/jobInjectStatusByRequest/reduce.js +1 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/jobStatusByRequest/map.js +6 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/jobStatusByRequest/reduce.js +1 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/jobsByChildQueueAndPriority/map.js +6 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/jobsByChildQueueAndPriority/reduce.js +1 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/jobsByChildQueueAndStatus/map.js +6 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/jobsByChildQueueAndStatus/reduce.js +1 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/jobsByRequest/map.js +6 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/jobsByRequest/reduce.js +1 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/jobsByStatus/map.js +6 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/jobsByStatus/reduce.js +1 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/jobsByStatusAndPriority/map.js +6 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/jobsByStatusAndPriority/reduce.js +1 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/openRequests/map.js +6 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/recent-items/map.js +5 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/siteWhitelistByRequest/map.js +6 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/siteWhitelistByRequest/reduce.js +1 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/specsByWorkflow/map.js +5 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/stuckElements/map.js +38 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/wmbsInjectStatusByRequest/map.js +12 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/wmbsInjectStatusByRequest/reduce.js +3 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/wmbsUrl/map.js +6 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/wmbsUrl/reduce.js +2 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/wmbsUrlByRequest/map.js +6 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/wmbsUrlByRequest/reduce.js +2 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/workflowSummary/map.js +9 -0
- wmglobalqueue-2.4.5.1.data/data/data/couchapps/WorkQueue/views/workflowSummary/reduce.js +10 -0
- wmglobalqueue-2.4.5.1.dist-info/METADATA +26 -0
- wmglobalqueue-2.4.5.1.dist-info/RECORD +347 -0
- wmglobalqueue-2.4.5.1.dist-info/WHEEL +5 -0
- wmglobalqueue-2.4.5.1.dist-info/licenses/LICENSE +202 -0
- wmglobalqueue-2.4.5.1.dist-info/licenses/NOTICE +16 -0
- wmglobalqueue-2.4.5.1.dist-info/top_level.txt +2 -0
|
@@ -0,0 +1,750 @@
|
|
|
1
|
+
#!/usr/bin/env python
|
|
2
|
+
"""
|
|
3
|
+
WorkQueueBackend
|
|
4
|
+
|
|
5
|
+
Interface to WorkQueue persistent storage
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
from builtins import object
|
|
9
|
+
from math import ceil
|
|
10
|
+
|
|
11
|
+
from future.utils import viewitems
|
|
12
|
+
|
|
13
|
+
import json
|
|
14
|
+
import random
|
|
15
|
+
import time
|
|
16
|
+
|
|
17
|
+
from WMCore.Database.CMSCouch import CouchServer, CouchNotFoundError, Document
|
|
18
|
+
from WMCore.Lexicon import sanitizeURL
|
|
19
|
+
from WMCore.WMSpec.WMWorkload import WMWorkloadHelper
|
|
20
|
+
from WMCore.WorkQueue.DataStructs.CouchWorkQueueElement import CouchWorkQueueElement, fixElementConflicts
|
|
21
|
+
from WMCore.WorkQueue.DataStructs.WorkQueueElement import possibleSites
|
|
22
|
+
from WMCore.WorkQueue.WorkQueueExceptions import WorkQueueNoMatchingElements, WorkQueueError
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
def formatReply(answer, *items):
    """Pair a couch bulk-commit reply with the documents that were committed.

    Replies carrying an 'error' key are collected separately; every
    successful reply updates the revision of the item with the matching id.

    :param answer: list of reply dicts from the couch bulk API
    :param items: the documents that were sent in the bulk request
    :return: tuple of (successfully saved items, error replies)
    """
    saved, errors = [], []
    for reply in answer:
        if 'error' in reply:
            errors.append(reply)
            continue
        # first item whose id matches this reply, if any
        matched = next((item for item in items if item.id == reply['id']), None)
        if matched is not None:
            matched.rev = reply['rev']
            saved.append(matched)
    return saved, errors
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
def sortAvailableElements(elementsList):
    """Order *elementsList* in place: highest priority first, ties broken
    by oldest creation time.

    Implemented as two stable sorts: the creation-time pass runs first so
    the priority pass preserves the age ordering inside each priority level.

    :param elementsList: list of workqueue element dicts
    :return: nothing, the list is sorted in place
    """
    byAge = lambda elem: elem['CreationTime']
    byPriority = lambda elem: elem['Priority']
    elementsList.sort(key=byAge)
    elementsList.sort(key=byPriority, reverse=True)
|
|
52
|
+
|
|
53
|
+
|
|
54
|
+
class WorkQueueBackend(object):
|
|
55
|
+
"""
|
|
56
|
+
Represents persistent storage for WorkQueue
|
|
57
|
+
"""
|
|
58
|
+
|
|
59
|
+
def __init__(self, db_url, db_name='workqueue',
             inbox_name=None, parentQueue=None,
             queueUrl=None, logger=None):
    """Connect to the workqueue and inbox couch databases.

    :param db_url: couch server url (with auth)
    :param db_name: main workqueue database name
    :param inbox_name: inbox database name; defaults to "<db_name>_inbox"
    :param parentQueue: url (with auth) of the parent queue couch, or None
    :param queueUrl: externally visible url of this queue; defaults to
                     db_url + '/' + db_name
    :param logger: logger object; falls back to the logging module itself
    """
    if not logger:
        import logging
        logger = logging
    self.logger = logger

    if inbox_name is None:
        inbox_name = "%s_inbox" % db_name

    self.server = CouchServer(db_url)
    self.parentCouchUrlWithAuth = parentQueue
    # keep a sanitized (credential-free) copy of the parent url for logging/filters
    self.parentCouchUrl = sanitizeURL(parentQueue)['url'] if parentQueue else None
    self.db = self.server.connectDatabase(db_name, create=False, size=10000)
    self.hostWithAuth = db_url
    self.inbox = self.server.connectDatabase(inbox_name, create=False, size=10000)
    defaultQueueUrl = queueUrl or (db_url + '/' + db_name)
    self.queueUrlWithAuth = defaultQueueUrl
    self.queueUrl = sanitizeURL(defaultQueueUrl)['url']
    # dotted path used as the wrapper key for bulk element updates
    self.eleKey = 'WMCore.WorkQueue.DataStructs.WorkQueueElement.WorkQueueElement'
|
|
83
|
+
|
|
84
|
+
def forceQueueSync(self):
    """Set up continuous CouchDB replications in both directions - used only in tests"""
    for startReplication in (self.pullFromParent, self.sendToParent):
        startReplication(continuous=True)
|
|
88
|
+
|
|
89
|
+
def pullFromParent(self, continuous=True, cancel=False):
    """Replicate from the parent couch into this queue's inbox - blocking: used only in unit tests

    :param continuous: set up a continuous replication instead of a one-shot
    :param cancel: cancel an existing replication instead of starting one
    """
    try:
        if self.parentCouchUrlWithAuth and self.queueUrlWithAuth:
            replicationTarget = "%s/%s" % (self.hostWithAuth, self.inbox.name)
            filterArgs = {'childUrl': self.queueUrl,
                          'parentUrl': self.parentCouchUrl}
            self.logger.info("Forcing pullFromParent from parentCouch: %s to queueUrl %s/%s",
                             self.parentCouchUrlWithAuth, self.queueUrlWithAuth, self.inbox.name)
            self.server.replicate(source=self.parentCouchUrlWithAuth,
                                  destination=replicationTarget,
                                  filter='WorkQueue/queueFilter',
                                  query_params=filterArgs,
                                  continuous=continuous,
                                  cancel=cancel,
                                  sleepSecs=6)
    except Exception as ex:
        # best-effort: a failed replication setup is logged, not fatal
        self.logger.warning('Replication from %s failed: %s', self.parentCouchUrlWithAuth, str(ex))
|
|
105
|
+
|
|
106
|
+
def sendToParent(self, continuous=True, cancel=False):
    """Replicate this queue's inbox up to the parent couch - blocking: used only in tests

    :param continuous: set up a continuous replication instead of a one-shot
    :param cancel: cancel an existing replication instead of starting one
    """
    try:
        if self.parentCouchUrlWithAuth and self.queueUrlWithAuth:
            filterArgs = {'childUrl': self.queueUrl,
                          'parentUrl': self.parentCouchUrl}
            self.logger.info("Forcing sendToParent from queueUrl %s/%s to parentCouch: %s",
                             self.queueUrlWithAuth, self.inbox.name, self.parentCouchUrlWithAuth)
            self.server.replicate(source="%s" % self.inbox.name,
                                  destination=self.parentCouchUrlWithAuth,
                                  filter='WorkQueue/queueFilter',
                                  query_params=filterArgs,
                                  continuous=continuous,
                                  cancel=cancel)
    except Exception as ex:
        # best-effort: a failed replication setup is logged, not fatal
        self.logger.warning('Replication to %s failed: %s', self.parentCouchUrlWithAuth, str(ex))
|
|
121
|
+
|
|
122
|
+
def getElementsForSplitting(self):
    """Return the inbox elements that need to be split (status 'Negotiating'),
    each with its workload spec attached under 'WMSpec'.

    Specs are loaded once per request name and shared across elements.
    """
    negotiating = self.getInboxElements(status='Negotiating')
    specCache = {}  # one spec load per request, reused by all its elements
    for element in negotiating:
        reqName = element['RequestName']
        if reqName not in specCache:
            helper = WMWorkloadHelper()
            helper.load(self.parentCouchUrlWithAuth + "/%s/spec" % reqName)
            specCache[reqName] = helper
        element['WMSpec'] = specCache[reqName]
    del specCache
    return negotiating
|
|
135
|
+
|
|
136
|
+
def insertWMSpec(self, wmspec):
    """Persist *wmspec* into the main workqueue database.

    The spec goes to the main db (not the inbox) so that child queues can
    see it; couch refuses an empty document, hence the placeholder field.

    :param wmspec: WMWorkloadHelper instance to store
    :return: result of wmspec.saveCouch()
    """
    placeholder = {'name': wmspec.name()}
    # repoint the spec url at this database before saving
    # (otherwise it keeps pointing at its previous location)
    wmspec.setSpecUrl(self.db['host'] + "/%s/%s/spec" % (self.db.name, wmspec.name()))
    return wmspec.saveCouch(self.hostWithAuth, self.db.name, placeholder)
|
|
146
|
+
|
|
147
|
+
def getWMSpec(self, name):
    """Load and return the workload spec stored for workflow *name*.

    :param name: workflow (request) name
    :return: populated WMWorkloadHelper
    """
    specUrl = self.hostWithAuth + "/%s/%s/spec" % (self.db.name, name)
    helper = WMWorkloadHelper()
    helper.load(specUrl)
    return helper
|
|
152
|
+
|
|
153
|
+
def insertElements(self, units, parent=None):
    """Insert the given work units into the database.

    The spec file is stored separately, once - all units are assumed to
    share the same spec. Units whose document id already exists are skipped.

    :param units: list of element dicts or CouchWorkQueueElement objects
    :param parent: the parent WorkQueueObject these elements belong to,
                   i.e. a workflow which has been split
    :return: list of the elements that were newly inserted
    """
    if not units:
        return []
    # store spec file separately - assume all elements share same spec
    self.insertWMSpec(units[0]['WMSpec'])
    inserted = []
    for item in units:
        # cast plain dicts into couch-backed elements
        if isinstance(item, CouchWorkQueueElement):
            element = item
        else:
            element = CouchWorkQueueElement(self.db, elementParams=dict(item))

        if parent:
            element['ParentQueueId'] = parent.id
            element['TeamName'] = parent['TeamName']
            element['WMBSUrl'] = parent['WMBSUrl']

        if element._couch.documentExists(element.id):
            self.logger.info('Element "%s" already exists, skip insertion.' % element.id)
            continue

        inserted.append(element)
        element.save()
        # FIXME: this is not performing bulk request, but single document commits(!)
        element._couch.commit()

    return inserted
|
|
185
|
+
|
|
186
|
+
def createWork(self, spec, **kwargs):
    """Build the Inbox element for this spec.

    The element is returned without being persisted to the database.

    :param spec: workload helper for the request
    :param kwargs: extra element parameters, merged into the new element
    :return: CouchWorkQueueElement bound to the inbox database
    """
    kwargs['WMSpec'] = spec
    kwargs['RequestName'] = spec.name()
    kwargs['StartPolicy'] = spec.startPolicyParameters()
    kwargs['EndPolicy'] = spec.endPolicyParameters()
    kwargs['OpenForNewData'] = True
    element = CouchWorkQueueElement(self.inbox, elementParams=kwargs)
    # inbox elements are keyed by the request name itself
    element.id = spec.name()
    return element
|
|
200
|
+
|
|
201
|
+
def getElements(self, status=None, elementIDs=None, returnIdOnly=False,
                db=None, loadSpec=False, WorkflowName=None, **elementFilters):
    """Return elements that match requirements

    status, elementIDs & filters are 'AND'ed together to filter elements.
    returnIdOnly causes the element not to be loaded and only the id returned
    db is used to specify which database to return from
    loadSpec causes the workflow for each spec to be loaded.
    WorkflowName may be used in the place of RequestName

    :param status: element status to filter on (e.g. 'Available')
    :param elementIDs: explicit document ids; mutually exclusive with the
        other filters and with returnIdOnly
    :param returnIdOnly: if True, return the raw list of ids from the view
    :param db: couch database to query; defaults to the main workqueue db
    :param loadSpec: if True, attach each element's workload under 'WMSpec'
    :param WorkflowName: workflow name; alias for the RequestName filter
    :param elementFilters: extra key/value pairs AND'ed into the filter
    :raises ValueError: if elementIDs is combined with other filters
    """
    key = []
    if not db:
        db = self.db
    # treat a RequestName filter as the WorkflowName argument
    if elementFilters.get('RequestName') and not WorkflowName:
        WorkflowName = elementFilters.pop('RequestName')

    if elementIDs:
        # direct load by id: no other filtering is supported on this path
        if elementFilters or status or returnIdOnly:
            msg = "Can't specify extra filters (or return id's) when using element id's with getElements()"
            raise ValueError(msg)
        elements = [CouchWorkQueueElement(db, i).load() for i in elementIDs]
    else:
        options = {'include_docs': True, 'filter': elementFilters, 'idOnly': returnIdOnly, 'reduce': False}
        # filter on workflow or status if possible
        # pick the most selective view available and use its key for lookup;
        # only one of workflow / status / subscription can drive the view key
        filterName = 'elementsByWorkflow'
        if WorkflowName:
            key.append(WorkflowName)
        elif status:
            filterName = 'elementsByStatus'
            key.append(status)
        elif elementFilters.get('SubscriptionId'):
            key.append(elementFilters['SubscriptionId'])
            filterName = 'elementsBySubscription'
        # add given params to filters
        # (status/workflow still need to be in the filter even when they
        # were not chosen as the view key)
        if status:
            options['filter']['Status'] = status
        if WorkflowName:
            options['filter']['RequestName'] = WorkflowName

        # list function applies the remaining filters server-side;
        # the reply is a JSON string, not a parsed structure
        view = db.loadList('WorkQueue', 'filter', filterName, options, key)
        view = json.loads(view)
        if returnIdOnly:
            return view
        elements = [CouchWorkQueueElement.fromDocument(db, row) for row in view]

    if loadSpec:
        specs = {}  # cache as may have multiple elements for same spec
        for ele in elements:
            if ele['RequestName'] not in specs:
                wmspec = self.getWMSpec(ele['RequestName'])
                specs[ele['RequestName']] = wmspec
            ele['WMSpec'] = specs[ele['RequestName']]
        del specs
    return elements
|
|
255
|
+
|
|
256
|
+
def getInboxElements(self, *args, **kwargs):
    """Query the inbox database for elements.

    Supports exactly the same arguments and semantics as getElements().
    """
    return self.getElements(db=self.inbox, *args, **kwargs)
|
|
261
|
+
|
|
262
|
+
def getElementsForWorkflow(self, workflow):
    """Return all elements belonging to *workflow* from the main database.

    :param workflow: workflow (request) name used as the view key
    :return: list of CouchWorkQueueElement objects
    """
    viewResult = self.db.loadView('WorkQueue', 'elementsByWorkflow',
                                  {'key': workflow, 'include_docs': True, 'reduce': False})
    docs = (row['doc'] for row in viewResult.get('rows', []))
    return [CouchWorkQueueElement.fromDocument(self.db, doc) for doc in docs]
|
|
269
|
+
|
|
270
|
+
def getElementsForParent(self, parent):
    """Return the elements whose parent queue id matches *parent*.id.

    :param parent: parent workqueue element (its .id is the view key)
    :return: list of CouchWorkQueueElement objects
    """
    viewResult = self.db.loadView('WorkQueue', 'elementsByParent',
                                  {'key': parent.id, 'include_docs': True})
    elementList = []
    for row in viewResult.get('rows', []):
        elementList.append(CouchWorkQueueElement.fromDocument(self.db, row['doc']))
    return elementList
|
|
276
|
+
|
|
277
|
+
def saveElements(self, *elements):
    """Persist elements

    Returns elements successfully saved, user must verify to catch errors
    """
    if not elements:
        return []
    # Queue every element for persistence, then flush with a single commit.
    for ele in elements:
        ele.save()
    # NOTE(review): assumes all elements share the same couch instance —
    # the commit on the first element flushes all queued writes.
    answer = elements[0]._couch.commit()
    saved, failures = formatReply(answer, *elements)
    msg = 'Couch error saving element: "%s", error "%s", reason "%s"'
    for failed in failures:
        self.logger.error(msg % (failed['id'], failed['error'], failed['reason']))
    return saved
|
|
293
|
+
|
|
294
|
+
def _raiseConflictErrorAndLog(self, conflictIDs, updatedParams, dbName="workqueue"):
    """
    Log a document-conflict error and raise WorkQueueError carrying
    the same message, so the caller sees which ids/params failed.
    """
    template = "Need to update this element manually from %s\n ids:%s\n, parameters:%s\n"
    errorMsg = template % (dbName, conflictIDs, updatedParams)
    self.logger.error(errorMsg)
    raise WorkQueueError(errorMsg)
|
|
299
|
+
|
|
300
|
+
def updateElements(self, *elementIds, **updatedParams):
    """Update given element's (identified by id) with new parameters"""
    if not elementIds:
        return
    # Parameters are nested under the element key inside each document.
    conflictIDs = self.db.updateBulkDocumentsWithConflictHandle(
        elementIds, {self.eleKey: updatedParams})
    if conflictIDs:
        self._raiseConflictErrorAndLog(conflictIDs, updatedParams)
    return
|
|
310
|
+
|
|
311
|
+
def updateInboxElements(self, *elementIds, **updatedParams):
    """Update given inbox element's (identified by id) with new parameters"""
    if not elementIds:
        return
    # Same shape as updateElements(), but targeting the inbox database.
    conflictIDs = self.inbox.updateBulkDocumentsWithConflictHandle(
        elementIds, {self.eleKey: updatedParams})
    if conflictIDs:
        self._raiseConflictErrorAndLog(conflictIDs, updatedParams, "workqueue_inbox")
    return
|
|
321
|
+
|
|
322
|
+
def deleteElements(self, *elements):
    """Delete elements

    Marks each element deleted, commits the deletions in one bulk call,
    logs any deletions that failed (document still present), and finally
    garbage-collects workflow spec documents that no element references
    anymore.
    """
    if not elements:
        return
    # Track each distinct workflow touched, so we can check afterwards
    # whether its spec document is still needed.
    specs = {}
    for i in elements:
        i.delete()
        specs[i['RequestName']] = None
    # NOTE(review): assumes all elements share the same couch instance —
    # a single commit on the first element flushes every queued delete.
    answer = elements[0]._couch.commit()
    _, failures = formatReply(answer, *elements)
    msg = 'Couch error deleting element: "%s", error "%s", reason "%s"'
    for failed in failures:
        # only count delete as failed if document still exists
        if elements[0]._couch.documentExists(failed['id']):
            self.logger.error(msg % (failed['id'], failed['error'], failed['reason']))
    # delete specs if no longer used
    for wf in specs:
        try:
            # limit=1 is enough: we only need to know whether ANY element remains
            if not self.db.loadView('WorkQueue', 'elementsByWorkflow',
                                    {'key': wf, 'limit': 1, 'reduce': False})['rows']:
                self.db.delete_doc(wf)
        except CouchNotFoundError:
            # spec already gone - nothing to clean up
            pass
|
|
345
|
+
|
|
346
|
+
def calculateAvailableWork(self, thresholds, siteJobCounts):
    """
    A short version of the `availableWork` method, which is used only to calculate
    the amount of work already available at the local workqueue.
    :param thresholds: a dictionary key'ed by the site name, values representing the
        maximum number of jobs allowed at that site.
    :param siteJobCounts: a dictionary-of-dictionaries key'ed by the site name; value
        is a dictionary with the number of jobs running at a given priority.
        NOTE: it is updated in place as elements are accepted.
    :return: a tuple with the elements accepted and an overview of job counts per site
    """
    # NOTE: this method can be less verbose as well
    elements = []
    # If there are no sites, punt early.
    if not thresholds:
        self.logger.error("No thresholds is set: Please check")
        return elements, siteJobCounts

    self.logger.info("Calculating available work from queue %s", self.queueUrl)

    options = {}
    options['include_docs'] = True
    options['descending'] = True
    options['resources'] = thresholds
    # effectively "no limit": fetch every available element in one request
    options['num_elem'] = 9999999  # magic number!
    result = self.db.loadList('WorkQueue', 'workRestrictions', 'availableByPriority', options)
    result = json.loads(result)
    self.logger.info("Retrieved %d elements from workRestrictions list for: %s",
                     len(result), self.queueUrl)

    # Convert python dictionary into Couch WQE objects
    # And sort them by creation time and priority, such that highest priority and
    # oldest elements come first in the list
    sortedElements = []
    for item in result:
        element = CouchWorkQueueElement.fromDocument(self.db, item)
        sortedElements.append(element)
    sortAvailableElements(sortedElements)

    for element in sortedElements:
        commonSites = possibleSites(element)
        prio = element['Priority']
        # shuffle list of common sites all the time to give everyone the same chance
        random.shuffle(commonSites)
        possibleSite = None
        for site in commonSites:
            if site in thresholds:
                # Count the number of jobs currently running of greater priority, if they
                # are less than the site thresholds, then accept this element
                curJobCount = sum([x[1] if x[0] >= prio else 0 for x in viewitems(siteJobCounts.get(site, {}))])
                self.logger.debug("Job Count: %s, site: %s thresholds: %s", curJobCount, site, thresholds[site])
                if curJobCount < thresholds[site]:
                    possibleSite = site
                    break

        if possibleSite:
            self.logger.debug("Meant to accept workflow: %s, with prio: %s, element id: %s, for site: %s",
                              element['RequestName'], prio, element.id, possibleSite)
            elements.append(element)
            # charge the accepted element's (scaled) job count against the site,
            # bucketed by priority, so later elements see updated occupancy
            siteJobCounts.setdefault(possibleSite, {})
            siteJobCounts[possibleSite][prio] = siteJobCounts[possibleSite].setdefault(prio, 0) + \
                                                element['Jobs'] * element.get('blowupFactor', 1.0)
        else:
            self.logger.debug("No available resources for %s with localdoc id %s",
                              element['RequestName'], element.id)

    self.logger.info("And %d elements passed location and siteJobCounts restrictions for: %s",
                     len(elements), self.queueUrl)
    return elements, siteJobCounts
|
|
414
|
+
|
|
415
|
+
def availableWork(self, thresholds, siteJobCounts, team=None, excludeWorkflows=None,
                  numElems=1000, rowsPerSlice=1000, maxRows=1000):
    """
    Get work - either from local or global queue - which is available to be run.

    :param thresholds: a dictionary key'ed by the site name, values representing the
        maximum number of jobs allowed at that site.
    :param siteJobCounts: a dictionary-of-dictionaries key'ed by the site name; value
        is a dictionary with the number of jobs running at a given priority.
        NOTE: it is updated in place by _evalAvailableWork().
    :param team: a string with the team name we want to pull work for
    :param excludeWorkflows: list of (aborted) workflows that should not be accepted
    :param numElems: integer with the maximum number of elements to be accepted.
    :param rowsPerSlice: integer defining the amount of rows for each slice (slices
        of a couchdb view request).
    :param maxRows: maximum number of available elements (rows) to be considered
        when pulling work down to the agent.
    :return: a tuple with the elements accepted and an overview of job counts per site
    """
    excludeWorkflows = excludeWorkflows or []
    acceptedElems = []
    # If there are no sites, punt early.
    if not thresholds:
        self.logger.error("No thresholds is set: Please check")
        return acceptedElems, siteJobCounts

    self.logger.info("Current siteJobCounts:")
    for site, jobsByPrio in viewitems(siteJobCounts):
        self.logger.info(" %s : %s", site, jobsByPrio)

    # Find out how many elements are in Available status
    numAvail = self.queueLength()
    self.logger.info("Current amount of WQEs in Available status: %s", numAvail)

    self.logger.info("Getting up to %d available work from %s", numElems, self.queueUrl)
    self.logger.info(" for team name: %s", team)
    self.logger.info(" with excludeWorkflows count: %s", len(excludeWorkflows))
    self.logger.debug(" with excludeWorkflows: %s", excludeWorkflows)
    self.logger.info(" for thresholds: %s", thresholds)

    options = {}
    options['include_docs'] = True
    options['descending'] = True
    options['resources'] = thresholds
    options['limit'] = rowsPerSlice
    # FIXME: num_elem option can likely be deprecated, but it needs synchronization
    # between agents and global workqueue... for now, make sure it can return the slice size
    options['num_elem'] = rowsPerSlice
    if team:
        options['team'] = team

    # Fetch workqueue elements in slices, using the CouchDB "limit" and "skip"
    # options for couch views. Conditions to stop this loop are:
    # a) stop once total_rows is reached (exhausted all available elements)
    # b) hit maximum allowed elements/rows to be considered for data acquisition (maxRows)
    # c) or, once the targeted number of elements has been accepted (numElems)
    numSlices = ceil(numAvail / rowsPerSlice)
    numSlices = min(numSlices, int(maxRows / rowsPerSlice))
    for sliceNum in range(numSlices):
        # documents to skip as a function of the slice number
        options['skip'] = sliceNum * rowsPerSlice
        self.logger.info(" for slice: %s, with rows range [%s - %s]",
                         sliceNum, options['skip'], options['skip'] + options['limit'])

        result = self.db.loadList('WorkQueue', 'workRestrictions', 'availableByPriority', options)
        # now check the remaining restrictions and priority
        wqeSlots = numElems - len(acceptedElems)
        elems = self._evalAvailableWork(json.loads(result), thresholds, siteJobCounts,
                                        excludeWorkflows, wqeSlots)
        acceptedElems.extend(elems)
        if len(acceptedElems) >= numElems:
            # first segment has no placeholders, so it must be a plain string
            # (the original used an extraneous f-string prefix here)
            msg = "Reached maximum number of elements to be accepted, "
            msg += f"configured to: {numElems}, from queue: {self.queueUrl}"
            self.logger.info(msg)
            break

    self.logger.info("Total of %d elements passed location and siteJobCounts restrictions for: %s",
                     len(acceptedElems), self.queueUrl)
    return acceptedElems, siteJobCounts
|
|
493
|
+
|
|
494
|
+
def _evalAvailableWork(self, listElems, thresholds, siteJobCounts,
                       excludeWorkflows, numElems):
    """
    Evaluate work available in workqueue and decide whether it can be
    accepted or not.

    :param listElems: list of dictionaries that correspond to the workqueue elements.
    :param thresholds: a dictionary key'ed by the site name, values representing the
        maximum number of jobs allowed at that site.
    :param siteJobCounts: a dictionary-of-dictionaries key'ed by the site name; value
        is a dictionary with the number of jobs running at a given priority.
        NOTE that it is updated in place.
    :param excludeWorkflows: list of (aborted) workflows that should not be accepted
    :param numElems: integer with the maximum number of elements to be accepted (default
        to a very large number when pulling work from local queue, read unlimited)
    :return: a tuple with the elements accepted and an overview of job counts per site
    """
    elems = []
    self.logger.info("Retrieved %d elements from workRestrictions list for: %s",
                     len(listElems), self.queueUrl)
    # Convert python dictionary into Couch WQE objects, skipping aborted workflows
    # And sort them by creation time and priority, such that highest priority and
    # oldest elements come first in the list
    sortedElements = []
    for i in listElems:
        element = CouchWorkQueueElement.fromDocument(self.db, i)
        # make sure not to acquire work for aborted or force-completed workflows
        if element['RequestName'] in excludeWorkflows:
            msg = "Skipping aborted/force-completed workflow: %s, work id: %s"
            self.logger.info(msg, element['RequestName'], element._id)
        elif element['Status'] != 'Available':
            # Extra safety mechanism, see https://github.com/dmwm/WMCore/pull/12050
            msg = "Skipping element in unwanted status: %s, work id: %s"
            self.logger.warning(msg, element['Status'], element._id)
        else:
            sortedElements.append(element)
    sortAvailableElements(sortedElements)

    for element in sortedElements:
        if numElems <= 0:
            # it means we accepted the configured number of elements
            break
        commonSites = possibleSites(element)
        prio = element['Priority']
        # shuffle list of common sites all the time to give everyone the same chance
        random.shuffle(commonSites)
        possibleSite = None
        for site in commonSites:
            if site in thresholds:
                # Count the number of jobs currently running of greater priority, if they
                # are less than the site thresholds, then accept this element
                curJobCount = sum([x[1] if x[0] >= prio else 0 for x in viewitems(siteJobCounts.get(site, {}))])
                self.logger.debug("Job Count: %s, site: %s thresholds: %s",
                                  curJobCount, site, thresholds[site])
                if curJobCount < thresholds[site]:
                    possibleSite = site
                    break

        if possibleSite:
            self.logger.info("Accepting workflow: %s, with prio: %s, element id: %s, for site: %s",
                             element['RequestName'], prio, element.id, possibleSite)
            numElems -= 1
            elems.append(element)
            # charge the accepted element's (scaled) job count against the site,
            # bucketed by priority, so later elements see updated occupancy
            siteJobCounts.setdefault(possibleSite, {})
            siteJobCounts[possibleSite][prio] = siteJobCounts[possibleSite].setdefault(prio, 0) + \
                                                element['Jobs'] * element.get('blowupFactor', 1.0)
        else:
            self.logger.debug("No available resources for %s with doc id %s",
                              element['RequestName'], element.id)

    self.logger.info("And %d elements passed location and siteJobCounts restrictions for: %s",
                     len(elems), self.queueUrl)
    return elems
|
|
567
|
+
|
|
568
|
+
def getActiveData(self):
    """Get data items we have work in the queue for"""
    view = self.db.loadView('WorkQueue', 'activeData', {'reduce': False, 'group': False})
    # row keys are [dbs_url, data_name]; de-duplicate via a set
    seen = {(row['key'][0], row['key'][1]) for row in view.get('rows', [])}
    return [{'dbs_url': dbsUrl, 'name': dataName} for dbsUrl, dataName in seen]
|
|
575
|
+
|
|
576
|
+
def getActiveParentData(self):
    """Get data items we have work in the queue for with parent"""
    view = self.db.loadView('WorkQueue', 'activeParentData', {'reduce': False, 'group': False})
    # row keys are [dbs_url, data_name]; de-duplicate via a set
    seen = {(row['key'][0], row['key'][1]) for row in view.get('rows', [])}
    return [{'dbs_url': dbsUrl, 'name': dataName} for dbsUrl, dataName in seen]
|
|
583
|
+
|
|
584
|
+
def getActivePileupData(self):
    """Get data items we have work in the queue for with pileup"""
    # Reduce/group are explicitly disabled for performance reasons.
    # Further details: https://github.com/dmwm/WMCore/issues/12319
    view = self.db.loadView('WorkQueue', 'activePileupData', {'reduce': False, 'group': False})

    # row keys are [dbs_url, data_name]; de-duplicate via a set
    seen = {(row['key'][0], row['key'][1]) for row in view.get('rows', [])}
    return [{'dbs_url': dbsUrl, 'name': dataName} for dbsUrl, dataName in seen]
|
|
594
|
+
|
|
595
|
+
def getElementsForData(self, data):
    """Get active elements for this dbs & data combo"""
    viewParams = {'key': data, 'include_docs': True}
    view = self.db.loadView('WorkQueue', 'elementsByData', viewParams)
    return [CouchWorkQueueElement.fromDocument(self.db, row['doc'])
            for row in view.get('rows', [])]
|
|
601
|
+
|
|
602
|
+
def getElementsForParentData(self, data):
    """Get active elements for this data """
    viewParams = {'key': data, 'include_docs': True}
    view = self.db.loadView('WorkQueue', 'elementsByParentData', viewParams)
    return [CouchWorkQueueElement.fromDocument(self.db, row['doc'])
            for row in view.get('rows', [])]
|
|
608
|
+
|
|
609
|
+
def getElementsForPileupData(self, data):
    """Get active elements for this data """
    viewParams = {'key': data, 'include_docs': True}
    view = self.db.loadView('WorkQueue', 'elementsByPileupData', viewParams)
    return [CouchWorkQueueElement.fromDocument(self.db, row['doc'])
            for row in view.get('rows', [])]
|
|
615
|
+
|
|
616
|
+
def isAvailable(self):
    """Is the server available, i.e. up and not compacting"""
    # Guard clause: any failure talking to couch means "not available".
    try:
        compacting = self.db.info()['compact_running']
    except Exception as ex:
        self.logger.error("CouchDB unavailable: %s" % str(ex))
        return False
    if compacting:
        self.logger.info("CouchDB compacting - try again later.")
        return False
    return True
|
|
627
|
+
|
|
628
|
+
def getWorkflows(self, includeInbox=False, includeSpecs=False):
    """Returns workflows known to workqueue"""
    # workflows with elements in the main workqueue db
    names = {row['key'] for row in
             self.db.loadView('WorkQueue', 'elementsByWorkflow', {'group': True})['rows']}
    if includeInbox:
        # also include workflows only present in the inbox
        names |= {row['key'] for row in
                  self.inbox.loadView('WorkQueue', 'elementsByWorkflow', {'group': True})['rows']}
    if includeSpecs:
        # also include workflows that only have a spec document left
        names |= {row['key'] for row in
                  self.db.loadView('WorkQueue', 'specsByWorkflow')['rows']}
    return list(names)
|
|
637
|
+
|
|
638
|
+
def queueLength(self):
    """Return number of available elements"""
    # limit=0 fetches no rows but still reports total_rows
    view = self.db.loadView('WorkQueue', 'availableByPriority', {'limit': 0})
    return view['total_rows']
|
|
641
|
+
|
|
642
|
+
def fixConflicts(self):
    """Fix elements in conflict

    Each local queue runs this to resolve its conflicts with global,
    resolution propagates up to global.

    Conflicting elements are merged into one element with others deleted.

    This will fail if elements are modified during the resolution -
    if this happens rerun.
    """
    # Process both the inbox and the main workqueue database.
    for db in [self.inbox, self.db]:
        # Each row of the 'conflicts' view is one conflicted document;
        # the row value lists the conflicting revisions.
        for row in db.loadView('WorkQueue', 'conflicts')['rows']:
            elementId = row['id']
            try:
                # Load every conflicting revision of this element.
                conflicting_elements = [CouchWorkQueueElement.fromDocument(db, db.document(elementId, rev)) \
                                        for rev in row['value']]
                fixed_elements = fixElementConflicts(*conflicting_elements)
                # Save the merged winner first; only if that commit is
                # accepted do we remove the losing revisions.
                if self.saveElements(fixed_elements[0]):
                    self.saveElements(*fixed_elements[1:])  # delete others (if merged value update accepted)
            except Exception as ex:
                # best-effort: log and continue with the next conflicted doc
                self.logger.error("Error resolving conflict for %s: %s" % (elementId, str(ex)))
|
|
664
|
+
|
|
665
|
+
def recordTaskActivity(self, taskname, comment=''):
    """Record a task for monitoring"""
    # Fetch the shared activity document, creating it on first use.
    try:
        record = self.db.document('task_activity')
    except CouchNotFoundError:
        record = Document('task_activity')
    record.setdefault('tasks', {})
    taskInfo = record['tasks'].setdefault(taskname, {})
    taskInfo['timestamp'] = time.time()
    taskInfo['comment'] = comment
    # Persist; failure to record freshness is logged but non-fatal.
    try:
        self.db.commitOne(record)
    except Exception as ex:
        self.logger.error("Unable to update task %s freshness: %s" % (taskname, str(ex)))
|
|
679
|
+
|
|
680
|
+
def getWMBSInjectStatus(self, request=None):
    """
    This service only provided by global queue except on draining agent

    :param request: optional workflow name; when given, returns a single
        boolean for that workflow (injected AND closed for new data).
        When omitted, returns a list of one-key dicts, one per workflow.
    :raises WorkQueueNoMatchingElements: when a specific request is not found
    """
    options = {'group': True, 'reduce': True}
    if request:
        options.update(key=request)
    data = self.db.loadView('WorkQueue', 'wmbsInjectStatusByRequest', options)
    if request:
        if data['rows']:
            injectionStatus = data['rows'][0]['value']
            inboxElement = self.getInboxElements(WorkflowName=request)
            requestOpen = inboxElement[0].get('OpenForNewData', False) if inboxElement else False
            # fully injected only when injection is done AND no new data expected
            return injectionStatus and not requestOpen
        else:
            raise WorkQueueNoMatchingElements("%s not found" % request)
    else:
        injectionStatus = dict((x['key'], x['value']) for x in data.get('rows', []))
        finalInjectionStatus = []
        # use a distinct loop variable; the original rebound the `request`
        # parameter here, shadowing it inside this branch
        for reqName in injectionStatus:
            inboxElement = self.getInboxElements(WorkflowName=reqName)
            requestOpen = inboxElement[0].get('OpenForNewData', False) if inboxElement else False
            finalInjectionStatus.append({reqName: injectionStatus[reqName] and not requestOpen})

        return finalInjectionStatus
|
|
705
|
+
|
|
706
|
+
def getWorkflowNames(self, inboxFlag=False):
|
|
707
|
+
"""Get workflow names from workqueue db"""
|
|
708
|
+
if inboxFlag:
|
|
709
|
+
db = self.inbox
|
|
710
|
+
else:
|
|
711
|
+
db = self.db
|
|
712
|
+
data = db.loadView('WorkQueue', 'elementsByWorkflow',
|
|
713
|
+
{'stale': "update_after", 'reduce': True, 'group': True})
|
|
714
|
+
return [x['key'] for x in data.get('rows', [])]
|
|
715
|
+
|
|
716
|
+
def deleteWQElementsByWorkflow(self, workflowNames):
    """
    Delete workqueue elements belonging to the given workflow names.

    It does not check the status of the workflows, so be careful when
    using this method. Pass only workflows which are in an end status.

    :param workflowNames: a single workflow name or a list of workflow names
    :return: integer with the number of documents deleted
    """
    deleted = 0
    dbs = [self.db, self.inbox]
    if not isinstance(workflowNames, list):
        workflowNames = [workflowNames]

    if not workflowNames:
        return deleted
    options = {}
    options["stale"] = "update_after"
    options["reduce"] = False

    # NOTE(review): idsByWflow accumulates across both databases, so ids found
    # in the first db are retried against the second — confirm this is intended
    idsByWflow = {}
    for couchdb in dbs:
        result = couchdb.loadView("WorkQueue", "elementsByWorkflow", options, workflowNames)
        for entry in result["rows"]:
            idsByWflow.setdefault(entry['key'], [])
            idsByWflow[entry['key']].append(entry['id'])
        for wflow, docIds in viewitems(idsByWflow):
            self.logger.info("Going to delete %d documents in *%s* db for workflow: %s. Doc IDs: %s",
                             len(docIds), couchdb.name, wflow, docIds)
            try:
                couchdb.bulkDeleteByIDs(docIds)
            except CouchNotFoundError as exc:
                self.logger.error("Failed to find one of the documents. Error: %s", str(exc))
            deleted += len(docIds)
    # delete the workflow with spec from workqueue db
    for wf in workflowNames:
        try:
            self.db.delete_doc(wf)
        except CouchNotFoundError:
            # spec document may already be gone; consistent with deleteElements()
            pass
    return deleted
|