wmglobalqueue 2.3.10__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of wmglobalqueue might be problematic. Click here for more details.
- Utils/CPMetrics.py +270 -0
- Utils/CertTools.py +62 -0
- Utils/EmailAlert.py +50 -0
- Utils/ExtendedUnitTestCase.py +62 -0
- Utils/FileTools.py +182 -0
- Utils/IteratorTools.py +80 -0
- Utils/MathUtils.py +31 -0
- Utils/MemoryCache.py +119 -0
- Utils/Patterns.py +24 -0
- Utils/Pipeline.py +137 -0
- Utils/PortForward.py +97 -0
- Utils/ProcessStats.py +103 -0
- Utils/PythonVersion.py +17 -0
- Utils/Signals.py +36 -0
- Utils/TemporaryEnvironment.py +27 -0
- Utils/Throttled.py +227 -0
- Utils/Timers.py +130 -0
- Utils/Timestamps.py +86 -0
- Utils/TokenManager.py +143 -0
- Utils/Tracing.py +60 -0
- Utils/TwPrint.py +98 -0
- Utils/Utilities.py +308 -0
- Utils/__init__.py +11 -0
- WMCore/ACDC/Collection.py +57 -0
- WMCore/ACDC/CollectionTypes.py +12 -0
- WMCore/ACDC/CouchCollection.py +67 -0
- WMCore/ACDC/CouchFileset.py +238 -0
- WMCore/ACDC/CouchService.py +73 -0
- WMCore/ACDC/DataCollectionService.py +485 -0
- WMCore/ACDC/Fileset.py +94 -0
- WMCore/ACDC/__init__.py +11 -0
- WMCore/Algorithms/Alarm.py +39 -0
- WMCore/Algorithms/MathAlgos.py +274 -0
- WMCore/Algorithms/MiscAlgos.py +67 -0
- WMCore/Algorithms/ParseXMLFile.py +115 -0
- WMCore/Algorithms/Permissions.py +27 -0
- WMCore/Algorithms/Singleton.py +58 -0
- WMCore/Algorithms/SubprocessAlgos.py +129 -0
- WMCore/Algorithms/__init__.py +7 -0
- WMCore/Cache/GenericDataCache.py +98 -0
- WMCore/Cache/WMConfigCache.py +572 -0
- WMCore/Cache/__init__.py +0 -0
- WMCore/Configuration.py +651 -0
- WMCore/DAOFactory.py +47 -0
- WMCore/DataStructs/File.py +177 -0
- WMCore/DataStructs/Fileset.py +140 -0
- WMCore/DataStructs/Job.py +182 -0
- WMCore/DataStructs/JobGroup.py +142 -0
- WMCore/DataStructs/JobPackage.py +49 -0
- WMCore/DataStructs/LumiList.py +734 -0
- WMCore/DataStructs/Mask.py +219 -0
- WMCore/DataStructs/MathStructs/ContinuousSummaryHistogram.py +197 -0
- WMCore/DataStructs/MathStructs/DiscreteSummaryHistogram.py +92 -0
- WMCore/DataStructs/MathStructs/SummaryHistogram.py +117 -0
- WMCore/DataStructs/MathStructs/__init__.py +0 -0
- WMCore/DataStructs/Pickleable.py +24 -0
- WMCore/DataStructs/Run.py +256 -0
- WMCore/DataStructs/Subscription.py +175 -0
- WMCore/DataStructs/WMObject.py +47 -0
- WMCore/DataStructs/WorkUnit.py +112 -0
- WMCore/DataStructs/Workflow.py +60 -0
- WMCore/DataStructs/__init__.py +8 -0
- WMCore/Database/CMSCouch.py +1349 -0
- WMCore/Database/ConfigDBMap.py +29 -0
- WMCore/Database/CouchUtils.py +118 -0
- WMCore/Database/DBCore.py +198 -0
- WMCore/Database/DBCreator.py +113 -0
- WMCore/Database/DBExceptionHandler.py +57 -0
- WMCore/Database/DBFactory.py +110 -0
- WMCore/Database/DBFormatter.py +177 -0
- WMCore/Database/Dialects.py +13 -0
- WMCore/Database/ExecuteDAO.py +327 -0
- WMCore/Database/MongoDB.py +241 -0
- WMCore/Database/MySQL/Destroy.py +42 -0
- WMCore/Database/MySQL/ListUserContent.py +20 -0
- WMCore/Database/MySQL/__init__.py +9 -0
- WMCore/Database/MySQLCore.py +132 -0
- WMCore/Database/Oracle/Destroy.py +56 -0
- WMCore/Database/Oracle/ListUserContent.py +19 -0
- WMCore/Database/Oracle/__init__.py +9 -0
- WMCore/Database/ResultSet.py +44 -0
- WMCore/Database/Transaction.py +91 -0
- WMCore/Database/__init__.py +9 -0
- WMCore/Database/ipy_profile_couch.py +438 -0
- WMCore/GlobalWorkQueue/CherryPyThreads/CleanUpTask.py +29 -0
- WMCore/GlobalWorkQueue/CherryPyThreads/HeartbeatMonitor.py +105 -0
- WMCore/GlobalWorkQueue/CherryPyThreads/LocationUpdateTask.py +28 -0
- WMCore/GlobalWorkQueue/CherryPyThreads/ReqMgrInteractionTask.py +35 -0
- WMCore/GlobalWorkQueue/CherryPyThreads/__init__.py +0 -0
- WMCore/GlobalWorkQueue/__init__.py +0 -0
- WMCore/GroupUser/CouchObject.py +127 -0
- WMCore/GroupUser/Decorators.py +51 -0
- WMCore/GroupUser/Group.py +33 -0
- WMCore/GroupUser/Interface.py +73 -0
- WMCore/GroupUser/User.py +96 -0
- WMCore/GroupUser/__init__.py +11 -0
- WMCore/Lexicon.py +836 -0
- WMCore/REST/Auth.py +202 -0
- WMCore/REST/CherryPyPeriodicTask.py +166 -0
- WMCore/REST/Error.py +333 -0
- WMCore/REST/Format.py +642 -0
- WMCore/REST/HeartbeatMonitorBase.py +90 -0
- WMCore/REST/Main.py +623 -0
- WMCore/REST/Server.py +2435 -0
- WMCore/REST/Services.py +24 -0
- WMCore/REST/Test.py +120 -0
- WMCore/REST/Tools.py +38 -0
- WMCore/REST/Validation.py +250 -0
- WMCore/REST/__init__.py +1 -0
- WMCore/ReqMgr/DataStructs/RequestStatus.py +209 -0
- WMCore/ReqMgr/DataStructs/RequestType.py +13 -0
- WMCore/ReqMgr/DataStructs/__init__.py +0 -0
- WMCore/ReqMgr/__init__.py +1 -0
- WMCore/Services/AlertManager/AlertManagerAPI.py +111 -0
- WMCore/Services/AlertManager/__init__.py +0 -0
- WMCore/Services/CRIC/CRIC.py +238 -0
- WMCore/Services/CRIC/__init__.py +0 -0
- WMCore/Services/DBS/DBS3Reader.py +1044 -0
- WMCore/Services/DBS/DBSConcurrency.py +44 -0
- WMCore/Services/DBS/DBSErrors.py +113 -0
- WMCore/Services/DBS/DBSReader.py +23 -0
- WMCore/Services/DBS/DBSUtils.py +139 -0
- WMCore/Services/DBS/DBSWriterObjects.py +381 -0
- WMCore/Services/DBS/ProdException.py +133 -0
- WMCore/Services/DBS/__init__.py +8 -0
- WMCore/Services/FWJRDB/FWJRDBAPI.py +118 -0
- WMCore/Services/FWJRDB/__init__.py +0 -0
- WMCore/Services/HTTPS/HTTPSAuthHandler.py +66 -0
- WMCore/Services/HTTPS/__init__.py +0 -0
- WMCore/Services/LogDB/LogDB.py +201 -0
- WMCore/Services/LogDB/LogDBBackend.py +191 -0
- WMCore/Services/LogDB/LogDBExceptions.py +11 -0
- WMCore/Services/LogDB/LogDBReport.py +85 -0
- WMCore/Services/LogDB/__init__.py +0 -0
- WMCore/Services/MSPileup/__init__.py +0 -0
- WMCore/Services/MSUtils/MSUtils.py +54 -0
- WMCore/Services/MSUtils/__init__.py +0 -0
- WMCore/Services/McM/McM.py +173 -0
- WMCore/Services/McM/__init__.py +8 -0
- WMCore/Services/MonIT/Grafana.py +133 -0
- WMCore/Services/MonIT/__init__.py +0 -0
- WMCore/Services/PyCondor/PyCondorAPI.py +154 -0
- WMCore/Services/PyCondor/PyCondorUtils.py +105 -0
- WMCore/Services/PyCondor/__init__.py +0 -0
- WMCore/Services/ReqMgr/ReqMgr.py +261 -0
- WMCore/Services/ReqMgr/__init__.py +0 -0
- WMCore/Services/ReqMgrAux/ReqMgrAux.py +419 -0
- WMCore/Services/ReqMgrAux/__init__.py +0 -0
- WMCore/Services/RequestDB/RequestDBReader.py +267 -0
- WMCore/Services/RequestDB/RequestDBWriter.py +39 -0
- WMCore/Services/RequestDB/__init__.py +0 -0
- WMCore/Services/Requests.py +624 -0
- WMCore/Services/Rucio/Rucio.py +1287 -0
- WMCore/Services/Rucio/RucioUtils.py +74 -0
- WMCore/Services/Rucio/__init__.py +0 -0
- WMCore/Services/RucioConMon/RucioConMon.py +128 -0
- WMCore/Services/RucioConMon/__init__.py +0 -0
- WMCore/Services/Service.py +400 -0
- WMCore/Services/StompAMQ/__init__.py +0 -0
- WMCore/Services/TagCollector/TagCollector.py +155 -0
- WMCore/Services/TagCollector/XMLUtils.py +98 -0
- WMCore/Services/TagCollector/__init__.py +0 -0
- WMCore/Services/UUIDLib.py +13 -0
- WMCore/Services/UserFileCache/UserFileCache.py +160 -0
- WMCore/Services/UserFileCache/__init__.py +8 -0
- WMCore/Services/WMAgent/WMAgent.py +63 -0
- WMCore/Services/WMAgent/__init__.py +0 -0
- WMCore/Services/WMArchive/CMSSWMetrics.py +526 -0
- WMCore/Services/WMArchive/DataMap.py +463 -0
- WMCore/Services/WMArchive/WMArchive.py +33 -0
- WMCore/Services/WMArchive/__init__.py +0 -0
- WMCore/Services/WMBS/WMBS.py +97 -0
- WMCore/Services/WMBS/__init__.py +0 -0
- WMCore/Services/WMStats/DataStruct/RequestInfoCollection.py +300 -0
- WMCore/Services/WMStats/DataStruct/__init__.py +0 -0
- WMCore/Services/WMStats/WMStatsPycurl.py +145 -0
- WMCore/Services/WMStats/WMStatsReader.py +445 -0
- WMCore/Services/WMStats/WMStatsWriter.py +273 -0
- WMCore/Services/WMStats/__init__.py +0 -0
- WMCore/Services/WMStatsServer/WMStatsServer.py +134 -0
- WMCore/Services/WMStatsServer/__init__.py +0 -0
- WMCore/Services/WorkQueue/WorkQueue.py +492 -0
- WMCore/Services/WorkQueue/__init__.py +0 -0
- WMCore/Services/__init__.py +8 -0
- WMCore/Services/pycurl_manager.py +574 -0
- WMCore/WMBase.py +50 -0
- WMCore/WMConnectionBase.py +164 -0
- WMCore/WMException.py +183 -0
- WMCore/WMExceptions.py +269 -0
- WMCore/WMFactory.py +76 -0
- WMCore/WMInit.py +228 -0
- WMCore/WMLogging.py +108 -0
- WMCore/WMSpec/ConfigSectionTree.py +442 -0
- WMCore/WMSpec/Persistency.py +135 -0
- WMCore/WMSpec/Steps/BuildMaster.py +87 -0
- WMCore/WMSpec/Steps/BuildTools.py +201 -0
- WMCore/WMSpec/Steps/Builder.py +97 -0
- WMCore/WMSpec/Steps/Diagnostic.py +89 -0
- WMCore/WMSpec/Steps/Emulator.py +62 -0
- WMCore/WMSpec/Steps/ExecuteMaster.py +208 -0
- WMCore/WMSpec/Steps/Executor.py +210 -0
- WMCore/WMSpec/Steps/StepFactory.py +213 -0
- WMCore/WMSpec/Steps/TaskEmulator.py +75 -0
- WMCore/WMSpec/Steps/Template.py +204 -0
- WMCore/WMSpec/Steps/Templates/AlcaHarvest.py +76 -0
- WMCore/WMSpec/Steps/Templates/CMSSW.py +613 -0
- WMCore/WMSpec/Steps/Templates/DQMUpload.py +59 -0
- WMCore/WMSpec/Steps/Templates/DeleteFiles.py +70 -0
- WMCore/WMSpec/Steps/Templates/LogArchive.py +84 -0
- WMCore/WMSpec/Steps/Templates/LogCollect.py +105 -0
- WMCore/WMSpec/Steps/Templates/StageOut.py +105 -0
- WMCore/WMSpec/Steps/Templates/__init__.py +10 -0
- WMCore/WMSpec/Steps/WMExecutionFailure.py +21 -0
- WMCore/WMSpec/Steps/__init__.py +8 -0
- WMCore/WMSpec/Utilities.py +63 -0
- WMCore/WMSpec/WMSpecErrors.py +12 -0
- WMCore/WMSpec/WMStep.py +347 -0
- WMCore/WMSpec/WMTask.py +1980 -0
- WMCore/WMSpec/WMWorkload.py +2288 -0
- WMCore/WMSpec/WMWorkloadTools.py +370 -0
- WMCore/WMSpec/__init__.py +9 -0
- WMCore/WorkQueue/DataLocationMapper.py +273 -0
- WMCore/WorkQueue/DataStructs/ACDCBlock.py +47 -0
- WMCore/WorkQueue/DataStructs/Block.py +48 -0
- WMCore/WorkQueue/DataStructs/CouchWorkQueueElement.py +148 -0
- WMCore/WorkQueue/DataStructs/WorkQueueElement.py +274 -0
- WMCore/WorkQueue/DataStructs/WorkQueueElementResult.py +152 -0
- WMCore/WorkQueue/DataStructs/WorkQueueElementsSummary.py +185 -0
- WMCore/WorkQueue/DataStructs/__init__.py +0 -0
- WMCore/WorkQueue/Policy/End/EndPolicyInterface.py +44 -0
- WMCore/WorkQueue/Policy/End/SingleShot.py +22 -0
- WMCore/WorkQueue/Policy/End/__init__.py +32 -0
- WMCore/WorkQueue/Policy/PolicyInterface.py +17 -0
- WMCore/WorkQueue/Policy/Start/Block.py +258 -0
- WMCore/WorkQueue/Policy/Start/Dataset.py +180 -0
- WMCore/WorkQueue/Policy/Start/MonteCarlo.py +131 -0
- WMCore/WorkQueue/Policy/Start/ResubmitBlock.py +171 -0
- WMCore/WorkQueue/Policy/Start/StartPolicyInterface.py +316 -0
- WMCore/WorkQueue/Policy/Start/__init__.py +34 -0
- WMCore/WorkQueue/Policy/__init__.py +57 -0
- WMCore/WorkQueue/WMBSHelper.py +772 -0
- WMCore/WorkQueue/WorkQueue.py +1237 -0
- WMCore/WorkQueue/WorkQueueBackend.py +750 -0
- WMCore/WorkQueue/WorkQueueBase.py +39 -0
- WMCore/WorkQueue/WorkQueueExceptions.py +44 -0
- WMCore/WorkQueue/WorkQueueReqMgrInterface.py +278 -0
- WMCore/WorkQueue/WorkQueueUtils.py +130 -0
- WMCore/WorkQueue/__init__.py +13 -0
- WMCore/Wrappers/JsonWrapper/JSONThunker.py +342 -0
- WMCore/Wrappers/JsonWrapper/__init__.py +7 -0
- WMCore/Wrappers/__init__.py +6 -0
- WMCore/__init__.py +10 -0
- wmglobalqueue-2.3.10.data/data/bin/wmc-dist-patch +15 -0
- wmglobalqueue-2.3.10.data/data/bin/wmc-dist-unpatch +8 -0
- wmglobalqueue-2.3.10.data/data/bin/wmc-httpd +3 -0
- wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/.couchapprc +1 -0
- wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/README.md +40 -0
- wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/_attachments/index.html +264 -0
- wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/_attachments/js/ElementInfoByWorkflow.js +96 -0
- wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/_attachments/js/StuckElementInfo.js +57 -0
- wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/_attachments/js/WorkloadInfoTable.js +80 -0
- wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/_attachments/js/dataTable.js +70 -0
- wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/_attachments/js/namespace.js +23 -0
- wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/_attachments/style/main.css +75 -0
- wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/couchapp.json +4 -0
- wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/filters/childQueueFilter.js +13 -0
- wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/filters/filterDeletedDocs.js +3 -0
- wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/filters/queueFilter.js +11 -0
- wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/language +1 -0
- wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/lib/mustache.js +333 -0
- wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/lib/validate.js +27 -0
- wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/lib/workqueue_utils.js +61 -0
- wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/lists/elementsDetail.js +28 -0
- wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/lists/filter.js +86 -0
- wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/lists/stuckElements.js +38 -0
- wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/lists/workRestrictions.js +153 -0
- wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/lists/workflowSummary.js +28 -0
- wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/rewrites.json +73 -0
- wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/shows/redirect.js +23 -0
- wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/shows/status.js +40 -0
- wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/templates/ElementSummaryByWorkflow.html +27 -0
- wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/templates/StuckElementSummary.html +26 -0
- wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/templates/TaskStatus.html +23 -0
- wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/templates/WorkflowSummary.html +27 -0
- wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/templates/partials/workqueue-common-lib.html +2 -0
- wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/templates/partials/yui-lib-remote.html +16 -0
- wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/templates/partials/yui-lib.html +18 -0
- wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/updates/in-place.js +50 -0
- wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/validate_doc_update.js +8 -0
- wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/vendor/couchapp/_attachments/jquery.couch.app.js +235 -0
- wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/vendor/couchapp/_attachments/jquery.pathbinder.js +173 -0
- wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/activeData/map.js +8 -0
- wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/activeData/reduce.js +2 -0
- wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/activeParentData/map.js +8 -0
- wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/activeParentData/reduce.js +2 -0
- wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/activePileupData/map.js +8 -0
- wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/activePileupData/reduce.js +2 -0
- wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/analyticsData/map.js +11 -0
- wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/analyticsData/reduce.js +1 -0
- wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/availableByPriority/map.js +6 -0
- wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/conflicts/map.js +5 -0
- wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/elements/map.js +5 -0
- wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/elementsByData/map.js +8 -0
- wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/elementsByParent/map.js +8 -0
- wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/elementsByParentData/map.js +8 -0
- wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/elementsByPileupData/map.js +8 -0
- wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/elementsByStatus/map.js +8 -0
- wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/elementsBySubscription/map.js +6 -0
- wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/elementsByWorkflow/map.js +8 -0
- wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/elementsByWorkflow/reduce.js +3 -0
- wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/elementsDetailByWorkflowAndStatus/map.js +26 -0
- wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/jobInjectStatusByRequest/map.js +10 -0
- wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/jobInjectStatusByRequest/reduce.js +1 -0
- wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/jobStatusByRequest/map.js +6 -0
- wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/jobStatusByRequest/reduce.js +1 -0
- wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/jobsByChildQueueAndPriority/map.js +6 -0
- wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/jobsByChildQueueAndPriority/reduce.js +1 -0
- wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/jobsByChildQueueAndStatus/map.js +6 -0
- wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/jobsByChildQueueAndStatus/reduce.js +1 -0
- wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/jobsByRequest/map.js +6 -0
- wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/jobsByRequest/reduce.js +1 -0
- wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/jobsByStatus/map.js +6 -0
- wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/jobsByStatus/reduce.js +1 -0
- wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/jobsByStatusAndPriority/map.js +6 -0
- wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/jobsByStatusAndPriority/reduce.js +1 -0
- wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/openRequests/map.js +6 -0
- wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/recent-items/map.js +5 -0
- wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/siteWhitelistByRequest/map.js +6 -0
- wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/siteWhitelistByRequest/reduce.js +1 -0
- wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/specsByWorkflow/map.js +5 -0
- wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/stuckElements/map.js +38 -0
- wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/wmbsInjectStatusByRequest/map.js +12 -0
- wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/wmbsInjectStatusByRequest/reduce.js +3 -0
- wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/wmbsUrl/map.js +6 -0
- wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/wmbsUrl/reduce.js +2 -0
- wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/wmbsUrlByRequest/map.js +6 -0
- wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/wmbsUrlByRequest/reduce.js +2 -0
- wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/workflowSummary/map.js +9 -0
- wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/workflowSummary/reduce.js +10 -0
- wmglobalqueue-2.3.10.dist-info/LICENSE +202 -0
- wmglobalqueue-2.3.10.dist-info/METADATA +24 -0
- wmglobalqueue-2.3.10.dist-info/NOTICE +16 -0
- wmglobalqueue-2.3.10.dist-info/RECORD +345 -0
- wmglobalqueue-2.3.10.dist-info/WHEEL +5 -0
- wmglobalqueue-2.3.10.dist-info/top_level.txt +2 -0
|
@@ -0,0 +1,44 @@
|
|
|
1
|
+
#!/usr/bin/env python
|
|
2
|
+
"""
|
|
3
|
+
File : DBSConcurrency.py
|
|
4
|
+
Author : Valentin Kuznetsov <vkuznet AT gmail dot com>
|
|
5
|
+
Description: dedicated module to holds DBS related functions executed
|
|
6
|
+
concurrent calls to DBS APIs.
|
|
7
|
+
"""
|
|
8
|
+
|
|
9
|
+
import json
|
|
10
|
+
import urllib
|
|
11
|
+
from WMCore.Services.pycurl_manager import getdata as multi_getdata
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
def getBlockInfo4PU(blockNames, dbsUrl, ckey, cert):
    """
    Fetch file lists and event counts for the given blocks from the DBS
    server, querying all blocks concurrently.

    Only valid files are requested (validFileOnly=1) and full details are
    asked for (detail=true) so that event counts are available.

    :param blockNames: list of block names
    :param dbsUrl: DBS server URL
    :param ckey: user key file
    :param cert: user certificate
    :return: dictionary of {block: {"FileList": list of strings, "NumberOfEvents": integer}, ...}
    """
    # block names must be URL-encoded before being placed in the query string
    urls = [f"{dbsUrl}/files?detail=true&validFileOnly=1&block_name={urllib.parse.quote_plus(blk)}"
            for blk in blockNames]
    # multi_getdata is a generator: DBS responses are consumed one at a
    # time as we iterate, instead of all being held in memory at once
    results = multi_getdata(urls, ckey, cert)
    blockInfo = {}
    for row in results:
        # recover the (decoded) block name from the URL that was queried
        encoded = row['url'].split('block_name=')[-1]
        blockName = urllib.parse.unquote_plus(encoded)
        records = json.loads(row['data'])
        blockInfo[blockName] = {
            'FileList': [rec['logical_file_name'] for rec in records],
            'NumberOfEvents': sum(rec['event_count'] for rec in records),
        }
    return blockInfo
|
|
@@ -0,0 +1,113 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
"""
|
|
3
|
+
DBSErrors represents generic class to handle DBS Go server errors
|
|
4
|
+
"""
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
import json
|
|
8
|
+
|
|
9
|
+
from WMCore.Services.DBS.ProdException import ProdException
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
def formatEx(excepInst):
    """
    _formatEx_

    Build a one-line "<class>:<message> <code>" summary from a
    DbsException-like instance (one exposing getErrorMessage/getErrorCode).
    """
    clsName = excepInst.__class__.__name__
    return "%s:%s %s" % (clsName, excepInst.getErrorMessage(), excepInst.getErrorCode())
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
def formatEx3(excepInst):
    """
    _formatEx_

    Build a "<class>:<message>" summary from any exception instance;
    relies only on str() so it works for plain Python exceptions too.
    """
    return "%s:%s" % (excepInst.__class__.__name__, str(excepInst))
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
class DataMgmtError(ProdException):
    """
    _DataMgmtError_

    General exception raised by the DataMgmt interface; base class for the
    more specific DBS reader/writer errors.
    """

    def __init__(self, message, errorNo=1000, **data):
        # default DataMgmt error number is 1000
        super().__init__(message, errorNo, **data)
|
|
44
|
+
|
|
45
|
+
|
|
46
|
+
class DBSWriterError(DataMgmtError):
    """
    _DBSWriterError_

    Generic exception for DBS write errors (error number 1001).
    """

    def __init__(self, msg, **data):
        super().__init__(msg, 1001, **data)
|
|
55
|
+
|
|
56
|
+
|
|
57
|
+
class DBSReaderError(DataMgmtError):
    """
    _DBSReaderError_

    Generic exception for DBS read errors (error number 1002).
    """

    def __init__(self, msg, **data):
        super().__init__(msg, 1002, **data)
|
|
66
|
+
|
|
67
|
+
|
|
68
|
+
class DBSError:
    """
    DBSError provides a generic interface to DBS (Go-based) server errors.

    The server replies with a JSON list of error records; the first record
    is kept as a dict.  If the payload cannot be parsed as JSON, the raw
    string representation of the input is stored instead and all accessors
    fall back to neutral defaults (0 / empty string).
    """

    def __init__(self, ex):
        """
        :param ex: either a DBSClient HTTPError (carrying the JSON payload
            in its "body" attribute) or the raw JSON string itself
        """
        # case of DBSClient HTTPError vs. raw string payload
        payload = ex.body if hasattr(ex, "body") else ex
        try:
            self.data = json.loads(payload)[0]
        except Exception:
            # non-JSON (or empty) payload: keep the raw text for debugging
            self.data = str(ex)

    def getHttpCode(self):
        """
        :return: HTTP error code, or 0 when unknown/malformed
        """
        if isinstance(self.data, dict):
            # .get chains keep malformed error records from raising KeyError
            return self.data.get('http', {}).get('code', 0)
        return 0

    def getServerCode(self):
        """
        :return: DBS server error code which is defined here
          - https://github.com/dmwm/dbs2go/blob/master/dbs/errors.go
        """
        if isinstance(self.data, dict):
            return self.data.get('error', {}).get('code', 0)
        return 0

    def getMessage(self):
        """
        :return: DBS server error message (concise output, last error in DBS error chain)
        """
        if isinstance(self.data, dict):
            return self.data.get('error', {}).get('message', "")
        return ""

    def getReason(self):
        """
        :return: DBS server error reason (expanded message)
        """
        if isinstance(self.data, dict):
            return self.data.get('error', {}).get('reason', "")
        return ""
|
|
@@ -0,0 +1,23 @@
|
|
|
1
|
+
#!/usr/bin/env python
|
|
2
|
+
"""
|
|
3
|
+
_DBSReader_
|
|
4
|
+
|
|
5
|
+
Readonly DBS Interface
|
|
6
|
+
|
|
7
|
+
"""
|
|
8
|
+
|
|
9
|
+
from WMCore.Services.DBS.DBSErrors import DBSReaderError
|
|
10
|
+
from WMCore.Services.DBS.DBS3Reader import DBS3Reader
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
def DBSReader(endpoint, **kwargs):
    """
    Factory that instantiates and sanity-checks the desired DBSReader object.

    :param endpoint: DBS server endpoint URL
    :param kwargs: forwarded verbatim to DBS3Reader
    :return: a working DBS3Reader instance
    :raises DBSReaderError: when the reader cannot be created or the DBS
        server cannot be contacted
    """
    try:
        dbs = DBS3Reader(endpoint, **kwargs)
        # serverinfo() acts as a connectivity probe: if it doesn't throw,
        # the endpoint is a working DBS3 server
        dbs.dbs.serverinfo()
        return dbs
    except Exception as ex:
        msg = 'Instantiating DBS3Reader failed with %s\n' % str(ex)
        # chain the original exception so its traceback is preserved as the cause
        raise DBSReaderError("Can't contact DBS at %s, got errors %s" % (endpoint, msg)) from ex
|
|
@@ -0,0 +1,139 @@
|
|
|
1
|
+
#!/usr/bin/env python
|
|
2
|
+
"""
|
|
3
|
+
_DBSUtils_
|
|
4
|
+
|
|
5
|
+
set of common utilities for DBS3Reader
|
|
6
|
+
|
|
7
|
+
"""
|
|
8
|
+
import json
|
|
9
|
+
import urllib
|
|
10
|
+
from urllib.parse import urlparse, parse_qs, quote_plus
|
|
11
|
+
from collections import defaultdict
|
|
12
|
+
|
|
13
|
+
from Utils.CertTools import cert, ckey
|
|
14
|
+
from dbs.apis.dbsClient import aggFileLumis, aggFileParents
|
|
15
|
+
from WMCore.Services.pycurl_manager import getdata as multi_getdata
|
|
16
|
+
from Utils.PortForward import PortForward
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
def dbsListFileParents(dbsUrl, blocks):
    """
    Concurrent counterpart of the DBS listFileParents API.

    :param dbsUrl: DBS URL
    :param blocks: list of block names
    :return: per-block dictionary of aggregated file-parent records
    """
    urls = []
    for blockName in blocks:
        urls.append('%s/fileparents?block_name=%s' % (dbsUrl, quote_plus(blockName)))
    # aggregate the raw records with the DBS client helper; key results by block
    return getUrls(urls, aggFileParents, 'block_name')
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
def dbsListFileLumis(dbsUrl, blocks):
    """
    Concurrent counterpart of the DBS listFileLumis API.

    :param dbsUrl: DBS URL
    :param blocks: list of block names
    :return: per-block dictionary of aggregated file-lumi records
    """
    urls = []
    for blockName in blocks:
        urls.append('%s/filelumis?block_name=%s' % (dbsUrl, quote_plus(blockName)))
    # aggregate the raw records with the DBS client helper; key results by block
    return getUrls(urls, aggFileLumis, 'block_name')
|
|
45
|
+
|
|
46
|
+
|
|
47
|
+
def dbsBlockOrigin(dbsUrl, blocks):
    """
    Concurrent counterpart of the DBS blockorigin API.

    :param dbsUrl: DBS URL
    :param blocks: list of block names
    :return: per-block dictionary of the raw blockorigin records
    """
    urls = []
    for blockName in blocks:
        urls.append('%s/blockorigin?block_name=%s' % (dbsUrl, quote_plus(blockName)))
    # no aggregation function: keep the decoded JSON records unchanged
    return getUrls(urls, None, 'block_name')
|
|
59
|
+
|
|
60
|
+
|
|
61
|
+
def dbsParentFilesGivenParentDataset(dbsUrl, parentDataset, fInfo):
    """
    Obtain parent files for the given file-info records.

    For every record the parent dataset is queried for files covering the
    same run / lumi range; matching parent LFNs are accumulated per child LFN.

    :param dbsUrl: DBS URL
    :param parentDataset: parent dataset name
    :param fInfo: iterable of file-info dicts carrying 'run_num',
        'lumi_section_num' and 'logical_file_name' keys
    :return: defaultdict mapping child LFN -> set of parent LFNs
    """
    portForwarder = PortForward(8443)

    def _queryUrl(fileInfo):
        # Build the query URL for one child file.  A single construction
        # point (instead of the URL being rebuilt in two separate loops)
        # guarantees the fetch and the lookup below can never diverge.
        run = fileInfo['run_num']
        lumis = urllib.parse.quote_plus(str(fileInfo['lumi_section_num']))
        url = f'{dbsUrl}/files?dataset={parentDataset}&run_num={run}&lumi_list={lumis}'
        return portForwarder(url)

    # (child LFN, query URL) pairs, computed once and reused for the lookup
    pairs = [(fileInfo['logical_file_name'], _queryUrl(fileInfo)) for fileInfo in fInfo]
    rdict = getUrls([url for _, url in pairs], None, None)

    parentFiles = defaultdict(set)
    for lfn, url in pairs:
        if url in rdict:
            parentFiles[lfn] |= {x['logical_file_name'] for x in rdict[url]}
    return parentFiles
|
|
92
|
+
|
|
93
|
+
|
|
94
|
+
def getUrls(urls, aggFunc, uKey=None):
    """
    Perform parallel DBS calls for the given URLs and collect the results,
    optionally aggregating each one.

    :param urls: list of DBS URLs to call
    :param aggFunc: aggregation function applied to each decoded result;
        when None the decoded JSON is stored unchanged
    :param uKey: optional URL query-parameter name; when given, the value
        of that parameter (rather than the full URL) keys the result dict
    :return: dictionary of results keyed by URL (or by uKey value)
    :raises RuntimeError: when any call returns a non-200 HTTP code
    """
    responses = multi_getdata(urls, ckey(), cert())

    rdict = {}
    for row in responses:
        url = row['url']
        code = int(row.get('code', 200))
        if code != 200:
            error = row.get('error')
            msg = f"Fail to query {url}. Error: {code} {error}"
            raise RuntimeError(msg)
        # choose the dictionary key: a specific URL parameter, or the URL itself
        key = urlParams(url).get(uKey) if uKey else url
        payload = json.loads(row.get('data', []))
        rdict[key] = aggFunc(payload) if aggFunc else payload
    return rdict
|
|
125
|
+
|
|
126
|
+
|
|
127
|
+
def urlParams(url):
    """
    Return the query parameters of a URL as a dictionary.

    Single-valued parameters are flattened to their scalar value; repeated
    parameters keep the full list of values.

    :param url: URL link
    :return: dictionary of URL parameters
    """
    query = urlparse(url).query
    return {key: vals[0] if len(vals) == 1 else vals
            for key, vals in parse_qs(query).items()}
|
|
@@ -0,0 +1,381 @@
|
|
|
1
|
+
#!/usr/bin/env python
|
|
2
|
+
"""
|
|
3
|
+
_DBSWriterObjects_
|
|
4
|
+
|
|
5
|
+
Functions to instantiate and return DBS Objects and insert them
|
|
6
|
+
into DBS if required
|
|
7
|
+
|
|
8
|
+
"""
|
|
9
|
+
from __future__ import print_function
|
|
10
|
+
|
|
11
|
+
from builtins import int
|
|
12
|
+
|
|
13
|
+
import logging
|
|
14
|
+
|
|
15
|
+
from DBSAPI.dbsException import *
|
|
16
|
+
from DBSAPI.dbsApiException import *
|
|
17
|
+
from DBSAPI.dbsPrimaryDataset import DbsPrimaryDataset
|
|
18
|
+
from DBSAPI.dbsAlgorithm import DbsAlgorithm
|
|
19
|
+
from DBSAPI.dbsQueryableParameterSet import DbsQueryableParameterSet
|
|
20
|
+
from DBSAPI.dbsProcessedDataset import DbsProcessedDataset
|
|
21
|
+
from DBSAPI.dbsFile import DbsFile
|
|
22
|
+
from DBSAPI.dbsFileBlock import DbsFileBlock
|
|
23
|
+
from DBSAPI.dbsStorageElement import DbsStorageElement
|
|
24
|
+
from DBSAPI.dbsRun import DbsRun
|
|
25
|
+
from DBSAPI.dbsLumiSection import DbsLumiSection
|
|
26
|
+
|
|
27
|
+
def makeTierList(dataTier):
    """
    _makeTierList_

    Standard tool to split data tiers if they contain - chars
    *** Do not use outside of this module ***

    """
    # str.split always yields at least one element, so a plain tier
    # comes back as a one-element list.
    return dataTier.split("-")
|
|
37
|
+
|
|
38
|
+
def createPrimaryDataset(datasetInfo, apiRef = None):
    """
    _createPrimaryDataset_

    Create and return a Primary Dataset object.
    If apiRef is not None, it is used to insert the dataset into the
    DBS

    :param datasetInfo: dict-like with a "PrimaryDataset" name and an
        optional "PrimaryDatasetType" entry (defaults to 'mc')
    :param apiRef: optional DBS API handle; when given the primary
        dataset is inserted into DBS
    :return: DbsPrimaryDataset instance
    """
    # dict.get with a default replaces the manual membership test
    PrimaryDatasetType = datasetInfo.get('PrimaryDatasetType', 'mc')

    logging.debug("Inserting PrimaryDataset %s with Type %s",
                  datasetInfo["PrimaryDataset"], PrimaryDatasetType)
    primary = DbsPrimaryDataset(Name = datasetInfo["PrimaryDataset"],
                                Type = PrimaryDatasetType)

    if apiRef is not None:
        apiRef.insertPrimaryDataset(primary)
    return primary
|
|
58
|
+
|
|
59
|
+
|
|
60
|
+
def createAlgorithm(datasetInfo, configMetadata = None, apiRef = None):
    """
    _createAlgorithm_

    Create an algorithm assuming that datasetInfo is a
    ProdCommon.MCPayloads.DatasetInfo like dictionary

    :param datasetInfo: dict with ApplicationName / ApplicationVersion /
        ApplicationFamily and optional PSetContent / PSetHash entries
    :param configMetadata: optional dict with 'name', 'version', 'Type'
        and 'annotation' keys; when given, a fully populated parameter
        set is attached to the algorithm
    :param apiRef: optional DBS API handle; when given the algorithm is
        inserted into DBS
    :return: DbsAlgorithm instance
    """
    exeName = datasetInfo['ApplicationName']
    appVersion = datasetInfo['ApplicationVersion']
    appFamily = datasetInfo["ApplicationFamily"]

    #
    # HACK: Problem with large PSets (is this still relevant ?)
    #
    # Repacker jobs have no PSetContent/PSetHash
    #
    psetContent = datasetInfo.get('PSetContent', None)
    if psetContent is None:
        psetContent = "PSET_CONTENT_NOT_AVAILABLE"
    psetHash = datasetInfo.get('PSetHash', None)
    if psetHash is None:
        psetHash = "NO_PSET_HASH"
    else:
        # Strip any trailing ";..." qualifier and a "hash=" prefix.
        # BUGFIX: the original test `if psetHash.find(";")` misused
        # str.find — it returns -1 (truthy) when ";" is absent and 0
        # (falsy) when ";" is the first character.  Splitting
        # unconditionally is a no-op when there is no ";" and always
        # yields the intended bare hash.
        psetHash = psetHash.split(";")[0]
        psetHash = psetHash.replace("hash=", "")

    #
    # HACK: 100 char limit on cfg file name
    if configMetadata is not None:
        cfgName = configMetadata['name']
        if len(cfgName) > 100:
            msg = ">>>>>>>>>>>>>>>>>>>>>>>>>>>>\n"
            msg += "TEST HACK USED FOR Config File Name"
            msg += ">>>>>>>>>>>>>>>>>>>>>>>>>>>>"
            logging.warning(msg)
            print(msg)
            # BUGFIX: cfgName[-99] selected a single character; keep the
            # last 99 characters of the over-long name instead.
            configMetadata['name'] = cfgName[-99:]

        psetInstance = DbsQueryableParameterSet(
            Hash = psetHash,
            Name = configMetadata['name'],
            Version = configMetadata['version'],
            Type = configMetadata['Type'],
            Annotation = configMetadata['annotation'],
            Content = psetContent,
            )

        algorithmInstance = DbsAlgorithm(
            ExecutableName = exeName,
            ApplicationVersion = appVersion,
            ApplicationFamily = appFamily,
            ParameterSetID = psetInstance
            )
    else:
        # Minimal parameter set when no config metadata is available
        psetInstance = DbsQueryableParameterSet(
            Hash = psetHash)
        algorithmInstance = DbsAlgorithm(
            ExecutableName = exeName,
            ApplicationVersion = appVersion,
            ApplicationFamily = appFamily,
            ParameterSetID = psetInstance
            )

    if apiRef is not None:
        apiRef.insertAlgorithm(algorithmInstance)
    return algorithmInstance
|
|
139
|
+
|
|
140
|
+
def createAlgorithmForInsert(datasetInfo):
    """
    _createPartialAlgorithm_

    Create an Algorithm instance that uses the minimal info needed
    to insert a file

    :param datasetInfo: dict with ApplicationName / ApplicationVersion /
        ApplicationFamily and an optional PSetHash entry
    :return: DbsAlgorithm instance (not inserted into DBS)
    """
    exeName = datasetInfo['ApplicationName']
    appVersion = datasetInfo['ApplicationVersion']
    appFamily = datasetInfo["ApplicationFamily"]

    #
    # Repacker jobs have no PsetContent/PSetHash
    #
    # NOTE: PSetContent is not needed for the insert-only parameter set,
    # so the unused local the original computed has been dropped.
    psetHash = datasetInfo.get('PSetHash', None)
    if psetHash is None:
        psetHash = "NO_PSET_HASH"
    else:
        # BUGFIX: `if psetHash.find(";")` misused str.find (-1 is truthy
        # when ";" is absent, 0 falsy when it is first).  Splitting and
        # stripping unconditionally is a no-op for clean hashes and
        # always removes a trailing ";..." part and a "hash=" prefix.
        psetHash = psetHash.split(";")[0]
        psetHash = psetHash.replace("hash=", "")

    psetInstance = DbsQueryableParameterSet(
        Hash = psetHash)
    algorithmInstance = DbsAlgorithm(
        ExecutableName = exeName,
        ApplicationVersion = appVersion,
        ApplicationFamily = appFamily,
        ParameterSetID = psetInstance
        )
    return algorithmInstance
|
|
176
|
+
|
|
177
|
+
def createMergeAlgorithm(datasetInfo, apiRef = None):
    """
    _createMergeAlgorithm_

    Create a DbsAlgorithm for a merge dataset

    """
    exeName = datasetInfo['ApplicationName']
    version = datasetInfo['ApplicationVersion']

    # Fall back to the output module name when the application family
    # is missing or empty.
    family = datasetInfo.get('ApplicationFamily', None)
    if not family:
        family = datasetInfo['OutputModuleName']

    mergeAlgo = DbsAlgorithm (
        ExecutableName = exeName,
        ApplicationVersion = version,
        ApplicationFamily = family,
        )

    if apiRef is not None:
        apiRef.insertAlgorithm(mergeAlgo)
    return mergeAlgo
|
|
200
|
+
|
|
201
|
+
|
|
202
|
+
|
|
203
|
+
|
|
204
|
+
def createProcessedDataset(primaryDataset, algorithm, datasetInfo,
                           apiRef = None):
    """
    _createProcessedDataset_

    Build a DbsProcessedDataset from the dataset info dictionary and,
    when apiRef is supplied, insert it into DBS.

    """
    physicsGroup = datasetInfo.get("PhysicsGroup", "NoGroup")
    status = datasetInfo.get("Status", "VALID")
    dataTier = datasetInfo['DataTier']

    globalTag = datasetInfo.get('Conditions', None)
    if globalTag is None:
        globalTag = ''

    # Record the parent dataset, if any
    parents = []
    inputDataset = datasetInfo.get('ParentDataset', None)
    if inputDataset is not None:
        parents.append(inputDataset)

    tierList = makeTierList(dataTier)
    name = datasetInfo['ProcessedDataset']

    algolist = list(algorithm) if algorithm not in ('', None) else []

    processedDataset = DbsProcessedDataset (
        PrimaryDataset = primaryDataset,
        AlgoList = algolist,
        Name = name,
        TierList = tierList,
        ParentList = parents,
        PhysicsGroup = physicsGroup,
        Status = status,
        GlobalTag = globalTag,
        )

    if apiRef is not None:
        apiRef.insertProcessedDataset(processedDataset)

    logging.debug("PrimaryDataset: %s ProcessedDataset: %s DataTierList: %s requested by PhysicsGroup: %s ", primaryDataset['Name'], name, tierList, physicsGroup)
    return processedDataset
|
|
246
|
+
|
|
247
|
+
|
|
248
|
+
|
|
249
|
+
|
|
250
|
+
def createDBSFiles(fjrFileInfo, jobType = None, apiRef = None):
    """
    _createDBSFiles_

    Create a list of DBS File instances from the file details contained
    in a FwkJobRep.FileInfo instance describing an output file
    Does not insert files, returns as list of DbsFile objects
    Does insert runs and lumisections if DBS API reference is passed

    :param fjrFileInfo: FwkJobRep.FileInfo-like object; must support
        item access ('LFN', 'Size', 'TotalEvents', optionally 'FileType'),
        and expose inputFiles, checksums, dataset, runs, branches and
        getLumiSections()
    :param jobType: when "Merge", a merge algorithm is built for each
        dataset instead of the standard insert algorithm
    :param apiRef: optional DBS API handle; when given, runs are inserted
        for data files that carry lumi sections
    :return: list of DbsFile instances (empty if no dataset info found)
    """
    results = []
    # LFNs of all input files become the parentage of every output file
    inputLFNs = [ x['LFN'] for x in fjrFileInfo.inputFiles]
    checksum = fjrFileInfo.checksums['cksum']
    adler32sum = fjrFileInfo.checksums.get('adler32', '')

    nEvents = int(fjrFileInfo['TotalEvents'])

    if len(fjrFileInfo.dataset)<=0:
        # Without dataset info we cannot build DbsFile objects at all
        logging.error("No dataset info found in FWJobReport!")
        return results

    # //
    # // Set FileType
    #//
    if 'FileType' in fjrFileInfo:
        fileType = fjrFileInfo['FileType']
    else:
        fileType = 'EDM'

    #
    # FIXME: at this point I should use the mc or data event type from
    #        the jobreport. Until this is supported by the framework,
    #        we use the workaround that mc job reports have an empty
    #        lumisections list (stripped in DBSInterface)
    #
    lumiList = []
    if ( len(fjrFileInfo.getLumiSections()) > 0 ):

        #
        # insert runs (for data files from detector)
        #
        if ( apiRef != None ):

            for runinfo in fjrFileInfo.runs:

                # Only the run number is known here; counters are zeroed
                run = DbsRun(
                    RunNumber = int(runinfo),
                    NumberOfEvents = 0,
                    NumberOfLumiSections = 0,
                    TotalLuminosity = 0,
                    StoreNumber = 0,
                    StartOfRun = 0,
                    EndOfRun = 0,
                    )

                apiRef.insertRun(run)

        #
        # insert lumisections (for data files from detector)
        # associate files with lumisections (for all data files)
        #
        for lumiinfo in fjrFileInfo.getLumiSections():

            lumi = DbsLumiSection(
                LumiSectionNumber = int(lumiinfo['LumiSectionNumber']),
                StartEventNumber = 0,
                EndEventNumber = 0,
                LumiStartTime = 0,
                LumiEndTime = 0,
                RunNumber = int(lumiinfo['RunNumber']),
                )

            # Isnt needed, causes monster slowdown
            #if ( apiRef != None ):
            #    apiRef.insertLumiSection(lumi)

            lumiList.append(lumi)

        logging.debug("Lumi associated to file is: %s" % ([x for x in lumiList]))

    # //
    # // Dataset info related to files and creation of DbsFile object
    #//
    # NOTE(review): every dataset entry produces its own DbsFile, but
    # they all share the same lumiList/inputLFNs — presumably intended;
    # verify against callers.
    for dataset in fjrFileInfo.dataset:

        primary = createPrimaryDataset(dataset)
        if jobType == "Merge":
            algo = createMergeAlgorithm(dataset)
        else:
            algo = createAlgorithmForInsert(dataset)

        processed = createProcessedDataset(primary, algo, dataset)

        dbsFileInstance = DbsFile(
            Checksum = checksum,
            Adler32 = adler32sum,
            NumberOfEvents = nEvents,
            LogicalFileName = fjrFileInfo['LFN'],
            FileSize = int(fjrFileInfo['Size']),
            Status = "VALID",
            ValidationStatus = 'VALID',
            FileType = fileType,
            Dataset = processed,
            TierList = makeTierList(dataset['DataTier']),
            AlgoList = [algo],
            LumiList = lumiList,
            ParentList = inputLFNs,
            BranchList = fjrFileInfo.branches,
            )

        results.append(dbsFileInstance)
    return results
|
|
362
|
+
|
|
363
|
+
|
|
364
|
+
def createDBSStorageElement(pnn):
    """
    _createDBSStorageElement_

    Wrap the physical node name in a DbsStorageElement object.
    """
    storageElement = DbsStorageElement(Name = pnn)
    return storageElement
|
|
370
|
+
|
|
371
|
+
|
|
372
|
+
def createDBSFileBlock(blockName):
    """
    _createDBSFileBlock_

    return a DbsFileBlock object with the block name provided

    NOTE: This method DOES NOT create a new block in DBS

    """
    fileBlock = DbsFileBlock(Name = blockName)
    return fileBlock
|