wmglobalqueue 2.3.10rc10 (py3-none-any.whl)

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of wmglobalqueue might be problematic.

Files changed (345)
  1. Utils/CPMetrics.py +270 -0
  2. Utils/CertTools.py +62 -0
  3. Utils/EmailAlert.py +50 -0
  4. Utils/ExtendedUnitTestCase.py +62 -0
  5. Utils/FileTools.py +182 -0
  6. Utils/IteratorTools.py +80 -0
  7. Utils/MathUtils.py +31 -0
  8. Utils/MemoryCache.py +119 -0
  9. Utils/Patterns.py +24 -0
  10. Utils/Pipeline.py +137 -0
  11. Utils/PortForward.py +97 -0
  12. Utils/ProcessStats.py +103 -0
  13. Utils/PythonVersion.py +17 -0
  14. Utils/Signals.py +36 -0
  15. Utils/TemporaryEnvironment.py +27 -0
  16. Utils/Throttled.py +227 -0
  17. Utils/Timers.py +130 -0
  18. Utils/Timestamps.py +86 -0
  19. Utils/TokenManager.py +143 -0
  20. Utils/Tracing.py +60 -0
  21. Utils/TwPrint.py +98 -0
  22. Utils/Utilities.py +308 -0
  23. Utils/__init__.py +11 -0
  24. WMCore/ACDC/Collection.py +57 -0
  25. WMCore/ACDC/CollectionTypes.py +12 -0
  26. WMCore/ACDC/CouchCollection.py +67 -0
  27. WMCore/ACDC/CouchFileset.py +238 -0
  28. WMCore/ACDC/CouchService.py +73 -0
  29. WMCore/ACDC/DataCollectionService.py +485 -0
  30. WMCore/ACDC/Fileset.py +94 -0
  31. WMCore/ACDC/__init__.py +11 -0
  32. WMCore/Algorithms/Alarm.py +39 -0
  33. WMCore/Algorithms/MathAlgos.py +274 -0
  34. WMCore/Algorithms/MiscAlgos.py +67 -0
  35. WMCore/Algorithms/ParseXMLFile.py +115 -0
  36. WMCore/Algorithms/Permissions.py +27 -0
  37. WMCore/Algorithms/Singleton.py +58 -0
  38. WMCore/Algorithms/SubprocessAlgos.py +129 -0
  39. WMCore/Algorithms/__init__.py +7 -0
  40. WMCore/Cache/GenericDataCache.py +98 -0
  41. WMCore/Cache/WMConfigCache.py +572 -0
  42. WMCore/Cache/__init__.py +0 -0
  43. WMCore/Configuration.py +651 -0
  44. WMCore/DAOFactory.py +47 -0
  45. WMCore/DataStructs/File.py +177 -0
  46. WMCore/DataStructs/Fileset.py +140 -0
  47. WMCore/DataStructs/Job.py +182 -0
  48. WMCore/DataStructs/JobGroup.py +142 -0
  49. WMCore/DataStructs/JobPackage.py +49 -0
  50. WMCore/DataStructs/LumiList.py +734 -0
  51. WMCore/DataStructs/Mask.py +219 -0
  52. WMCore/DataStructs/MathStructs/ContinuousSummaryHistogram.py +197 -0
  53. WMCore/DataStructs/MathStructs/DiscreteSummaryHistogram.py +92 -0
  54. WMCore/DataStructs/MathStructs/SummaryHistogram.py +117 -0
  55. WMCore/DataStructs/MathStructs/__init__.py +0 -0
  56. WMCore/DataStructs/Pickleable.py +24 -0
  57. WMCore/DataStructs/Run.py +256 -0
  58. WMCore/DataStructs/Subscription.py +175 -0
  59. WMCore/DataStructs/WMObject.py +47 -0
  60. WMCore/DataStructs/WorkUnit.py +112 -0
  61. WMCore/DataStructs/Workflow.py +60 -0
  62. WMCore/DataStructs/__init__.py +8 -0
  63. WMCore/Database/CMSCouch.py +1349 -0
  64. WMCore/Database/ConfigDBMap.py +29 -0
  65. WMCore/Database/CouchUtils.py +118 -0
  66. WMCore/Database/DBCore.py +198 -0
  67. WMCore/Database/DBCreator.py +113 -0
  68. WMCore/Database/DBExceptionHandler.py +57 -0
  69. WMCore/Database/DBFactory.py +110 -0
  70. WMCore/Database/DBFormatter.py +177 -0
  71. WMCore/Database/Dialects.py +13 -0
  72. WMCore/Database/ExecuteDAO.py +327 -0
  73. WMCore/Database/MongoDB.py +241 -0
  74. WMCore/Database/MySQL/Destroy.py +42 -0
  75. WMCore/Database/MySQL/ListUserContent.py +20 -0
  76. WMCore/Database/MySQL/__init__.py +9 -0
  77. WMCore/Database/MySQLCore.py +132 -0
  78. WMCore/Database/Oracle/Destroy.py +56 -0
  79. WMCore/Database/Oracle/ListUserContent.py +19 -0
  80. WMCore/Database/Oracle/__init__.py +9 -0
  81. WMCore/Database/ResultSet.py +44 -0
  82. WMCore/Database/Transaction.py +91 -0
  83. WMCore/Database/__init__.py +9 -0
  84. WMCore/Database/ipy_profile_couch.py +438 -0
  85. WMCore/GlobalWorkQueue/CherryPyThreads/CleanUpTask.py +29 -0
  86. WMCore/GlobalWorkQueue/CherryPyThreads/HeartbeatMonitor.py +105 -0
  87. WMCore/GlobalWorkQueue/CherryPyThreads/LocationUpdateTask.py +28 -0
  88. WMCore/GlobalWorkQueue/CherryPyThreads/ReqMgrInteractionTask.py +35 -0
  89. WMCore/GlobalWorkQueue/CherryPyThreads/__init__.py +0 -0
  90. WMCore/GlobalWorkQueue/__init__.py +0 -0
  91. WMCore/GroupUser/CouchObject.py +127 -0
  92. WMCore/GroupUser/Decorators.py +51 -0
  93. WMCore/GroupUser/Group.py +33 -0
  94. WMCore/GroupUser/Interface.py +73 -0
  95. WMCore/GroupUser/User.py +96 -0
  96. WMCore/GroupUser/__init__.py +11 -0
  97. WMCore/Lexicon.py +836 -0
  98. WMCore/REST/Auth.py +202 -0
  99. WMCore/REST/CherryPyPeriodicTask.py +166 -0
  100. WMCore/REST/Error.py +333 -0
  101. WMCore/REST/Format.py +642 -0
  102. WMCore/REST/HeartbeatMonitorBase.py +90 -0
  103. WMCore/REST/Main.py +623 -0
  104. WMCore/REST/Server.py +2435 -0
  105. WMCore/REST/Services.py +24 -0
  106. WMCore/REST/Test.py +120 -0
  107. WMCore/REST/Tools.py +38 -0
  108. WMCore/REST/Validation.py +250 -0
  109. WMCore/REST/__init__.py +1 -0
  110. WMCore/ReqMgr/DataStructs/RequestStatus.py +209 -0
  111. WMCore/ReqMgr/DataStructs/RequestType.py +13 -0
  112. WMCore/ReqMgr/DataStructs/__init__.py +0 -0
  113. WMCore/ReqMgr/__init__.py +1 -0
  114. WMCore/Services/AlertManager/AlertManagerAPI.py +111 -0
  115. WMCore/Services/AlertManager/__init__.py +0 -0
  116. WMCore/Services/CRIC/CRIC.py +238 -0
  117. WMCore/Services/CRIC/__init__.py +0 -0
  118. WMCore/Services/DBS/DBS3Reader.py +1044 -0
  119. WMCore/Services/DBS/DBSConcurrency.py +44 -0
  120. WMCore/Services/DBS/DBSErrors.py +113 -0
  121. WMCore/Services/DBS/DBSReader.py +23 -0
  122. WMCore/Services/DBS/DBSUtils.py +139 -0
  123. WMCore/Services/DBS/DBSWriterObjects.py +381 -0
  124. WMCore/Services/DBS/ProdException.py +133 -0
  125. WMCore/Services/DBS/__init__.py +8 -0
  126. WMCore/Services/FWJRDB/FWJRDBAPI.py +118 -0
  127. WMCore/Services/FWJRDB/__init__.py +0 -0
  128. WMCore/Services/HTTPS/HTTPSAuthHandler.py +66 -0
  129. WMCore/Services/HTTPS/__init__.py +0 -0
  130. WMCore/Services/LogDB/LogDB.py +201 -0
  131. WMCore/Services/LogDB/LogDBBackend.py +191 -0
  132. WMCore/Services/LogDB/LogDBExceptions.py +11 -0
  133. WMCore/Services/LogDB/LogDBReport.py +85 -0
  134. WMCore/Services/LogDB/__init__.py +0 -0
  135. WMCore/Services/MSPileup/__init__.py +0 -0
  136. WMCore/Services/MSUtils/MSUtils.py +54 -0
  137. WMCore/Services/MSUtils/__init__.py +0 -0
  138. WMCore/Services/McM/McM.py +173 -0
  139. WMCore/Services/McM/__init__.py +8 -0
  140. WMCore/Services/MonIT/Grafana.py +133 -0
  141. WMCore/Services/MonIT/__init__.py +0 -0
  142. WMCore/Services/PyCondor/PyCondorAPI.py +154 -0
  143. WMCore/Services/PyCondor/PyCondorUtils.py +105 -0
  144. WMCore/Services/PyCondor/__init__.py +0 -0
  145. WMCore/Services/ReqMgr/ReqMgr.py +261 -0
  146. WMCore/Services/ReqMgr/__init__.py +0 -0
  147. WMCore/Services/ReqMgrAux/ReqMgrAux.py +419 -0
  148. WMCore/Services/ReqMgrAux/__init__.py +0 -0
  149. WMCore/Services/RequestDB/RequestDBReader.py +267 -0
  150. WMCore/Services/RequestDB/RequestDBWriter.py +39 -0
  151. WMCore/Services/RequestDB/__init__.py +0 -0
  152. WMCore/Services/Requests.py +624 -0
  153. WMCore/Services/Rucio/Rucio.py +1287 -0
  154. WMCore/Services/Rucio/RucioUtils.py +74 -0
  155. WMCore/Services/Rucio/__init__.py +0 -0
  156. WMCore/Services/RucioConMon/RucioConMon.py +128 -0
  157. WMCore/Services/RucioConMon/__init__.py +0 -0
  158. WMCore/Services/Service.py +400 -0
  159. WMCore/Services/StompAMQ/__init__.py +0 -0
  160. WMCore/Services/TagCollector/TagCollector.py +155 -0
  161. WMCore/Services/TagCollector/XMLUtils.py +98 -0
  162. WMCore/Services/TagCollector/__init__.py +0 -0
  163. WMCore/Services/UUIDLib.py +13 -0
  164. WMCore/Services/UserFileCache/UserFileCache.py +160 -0
  165. WMCore/Services/UserFileCache/__init__.py +8 -0
  166. WMCore/Services/WMAgent/WMAgent.py +63 -0
  167. WMCore/Services/WMAgent/__init__.py +0 -0
  168. WMCore/Services/WMArchive/CMSSWMetrics.py +526 -0
  169. WMCore/Services/WMArchive/DataMap.py +463 -0
  170. WMCore/Services/WMArchive/WMArchive.py +33 -0
  171. WMCore/Services/WMArchive/__init__.py +0 -0
  172. WMCore/Services/WMBS/WMBS.py +97 -0
  173. WMCore/Services/WMBS/__init__.py +0 -0
  174. WMCore/Services/WMStats/DataStruct/RequestInfoCollection.py +300 -0
  175. WMCore/Services/WMStats/DataStruct/__init__.py +0 -0
  176. WMCore/Services/WMStats/WMStatsPycurl.py +145 -0
  177. WMCore/Services/WMStats/WMStatsReader.py +445 -0
  178. WMCore/Services/WMStats/WMStatsWriter.py +273 -0
  179. WMCore/Services/WMStats/__init__.py +0 -0
  180. WMCore/Services/WMStatsServer/WMStatsServer.py +134 -0
  181. WMCore/Services/WMStatsServer/__init__.py +0 -0
  182. WMCore/Services/WorkQueue/WorkQueue.py +492 -0
  183. WMCore/Services/WorkQueue/__init__.py +0 -0
  184. WMCore/Services/__init__.py +8 -0
  185. WMCore/Services/pycurl_manager.py +574 -0
  186. WMCore/WMBase.py +50 -0
  187. WMCore/WMConnectionBase.py +164 -0
  188. WMCore/WMException.py +183 -0
  189. WMCore/WMExceptions.py +269 -0
  190. WMCore/WMFactory.py +76 -0
  191. WMCore/WMInit.py +228 -0
  192. WMCore/WMLogging.py +108 -0
  193. WMCore/WMSpec/ConfigSectionTree.py +442 -0
  194. WMCore/WMSpec/Persistency.py +135 -0
  195. WMCore/WMSpec/Steps/BuildMaster.py +87 -0
  196. WMCore/WMSpec/Steps/BuildTools.py +201 -0
  197. WMCore/WMSpec/Steps/Builder.py +97 -0
  198. WMCore/WMSpec/Steps/Diagnostic.py +89 -0
  199. WMCore/WMSpec/Steps/Emulator.py +62 -0
  200. WMCore/WMSpec/Steps/ExecuteMaster.py +208 -0
  201. WMCore/WMSpec/Steps/Executor.py +210 -0
  202. WMCore/WMSpec/Steps/StepFactory.py +213 -0
  203. WMCore/WMSpec/Steps/TaskEmulator.py +75 -0
  204. WMCore/WMSpec/Steps/Template.py +204 -0
  205. WMCore/WMSpec/Steps/Templates/AlcaHarvest.py +76 -0
  206. WMCore/WMSpec/Steps/Templates/CMSSW.py +613 -0
  207. WMCore/WMSpec/Steps/Templates/DQMUpload.py +59 -0
  208. WMCore/WMSpec/Steps/Templates/DeleteFiles.py +70 -0
  209. WMCore/WMSpec/Steps/Templates/LogArchive.py +84 -0
  210. WMCore/WMSpec/Steps/Templates/LogCollect.py +105 -0
  211. WMCore/WMSpec/Steps/Templates/StageOut.py +105 -0
  212. WMCore/WMSpec/Steps/Templates/__init__.py +10 -0
  213. WMCore/WMSpec/Steps/WMExecutionFailure.py +21 -0
  214. WMCore/WMSpec/Steps/__init__.py +8 -0
  215. WMCore/WMSpec/Utilities.py +63 -0
  216. WMCore/WMSpec/WMSpecErrors.py +12 -0
  217. WMCore/WMSpec/WMStep.py +347 -0
  218. WMCore/WMSpec/WMTask.py +1980 -0
  219. WMCore/WMSpec/WMWorkload.py +2288 -0
  220. WMCore/WMSpec/WMWorkloadTools.py +370 -0
  221. WMCore/WMSpec/__init__.py +9 -0
  222. WMCore/WorkQueue/DataLocationMapper.py +269 -0
  223. WMCore/WorkQueue/DataStructs/ACDCBlock.py +47 -0
  224. WMCore/WorkQueue/DataStructs/Block.py +48 -0
  225. WMCore/WorkQueue/DataStructs/CouchWorkQueueElement.py +148 -0
  226. WMCore/WorkQueue/DataStructs/WorkQueueElement.py +274 -0
  227. WMCore/WorkQueue/DataStructs/WorkQueueElementResult.py +152 -0
  228. WMCore/WorkQueue/DataStructs/WorkQueueElementsSummary.py +185 -0
  229. WMCore/WorkQueue/DataStructs/__init__.py +0 -0
  230. WMCore/WorkQueue/Policy/End/EndPolicyInterface.py +44 -0
  231. WMCore/WorkQueue/Policy/End/SingleShot.py +22 -0
  232. WMCore/WorkQueue/Policy/End/__init__.py +32 -0
  233. WMCore/WorkQueue/Policy/PolicyInterface.py +17 -0
  234. WMCore/WorkQueue/Policy/Start/Block.py +258 -0
  235. WMCore/WorkQueue/Policy/Start/Dataset.py +180 -0
  236. WMCore/WorkQueue/Policy/Start/MonteCarlo.py +131 -0
  237. WMCore/WorkQueue/Policy/Start/ResubmitBlock.py +171 -0
  238. WMCore/WorkQueue/Policy/Start/StartPolicyInterface.py +316 -0
  239. WMCore/WorkQueue/Policy/Start/__init__.py +34 -0
  240. WMCore/WorkQueue/Policy/__init__.py +57 -0
  241. WMCore/WorkQueue/WMBSHelper.py +772 -0
  242. WMCore/WorkQueue/WorkQueue.py +1237 -0
  243. WMCore/WorkQueue/WorkQueueBackend.py +741 -0
  244. WMCore/WorkQueue/WorkQueueBase.py +39 -0
  245. WMCore/WorkQueue/WorkQueueExceptions.py +44 -0
  246. WMCore/WorkQueue/WorkQueueReqMgrInterface.py +278 -0
  247. WMCore/WorkQueue/WorkQueueUtils.py +130 -0
  248. WMCore/WorkQueue/__init__.py +13 -0
  249. WMCore/Wrappers/JsonWrapper/JSONThunker.py +342 -0
  250. WMCore/Wrappers/JsonWrapper/__init__.py +7 -0
  251. WMCore/Wrappers/__init__.py +6 -0
  252. WMCore/__init__.py +10 -0
  253. wmglobalqueue-2.3.10rc10.data/data/bin/wmc-dist-patch +15 -0
  254. wmglobalqueue-2.3.10rc10.data/data/bin/wmc-dist-unpatch +8 -0
  255. wmglobalqueue-2.3.10rc10.data/data/bin/wmc-httpd +3 -0
  256. wmglobalqueue-2.3.10rc10.data/data/data/couchapps/WorkQueue/.couchapprc +1 -0
  257. wmglobalqueue-2.3.10rc10.data/data/data/couchapps/WorkQueue/README.md +40 -0
  258. wmglobalqueue-2.3.10rc10.data/data/data/couchapps/WorkQueue/_attachments/index.html +264 -0
  259. wmglobalqueue-2.3.10rc10.data/data/data/couchapps/WorkQueue/_attachments/js/ElementInfoByWorkflow.js +96 -0
  260. wmglobalqueue-2.3.10rc10.data/data/data/couchapps/WorkQueue/_attachments/js/StuckElementInfo.js +57 -0
  261. wmglobalqueue-2.3.10rc10.data/data/data/couchapps/WorkQueue/_attachments/js/WorkloadInfoTable.js +80 -0
  262. wmglobalqueue-2.3.10rc10.data/data/data/couchapps/WorkQueue/_attachments/js/dataTable.js +70 -0
  263. wmglobalqueue-2.3.10rc10.data/data/data/couchapps/WorkQueue/_attachments/js/namespace.js +23 -0
  264. wmglobalqueue-2.3.10rc10.data/data/data/couchapps/WorkQueue/_attachments/style/main.css +75 -0
  265. wmglobalqueue-2.3.10rc10.data/data/data/couchapps/WorkQueue/couchapp.json +4 -0
  266. wmglobalqueue-2.3.10rc10.data/data/data/couchapps/WorkQueue/filters/childQueueFilter.js +13 -0
  267. wmglobalqueue-2.3.10rc10.data/data/data/couchapps/WorkQueue/filters/filterDeletedDocs.js +3 -0
  268. wmglobalqueue-2.3.10rc10.data/data/data/couchapps/WorkQueue/filters/queueFilter.js +11 -0
  269. wmglobalqueue-2.3.10rc10.data/data/data/couchapps/WorkQueue/language +1 -0
  270. wmglobalqueue-2.3.10rc10.data/data/data/couchapps/WorkQueue/lib/mustache.js +333 -0
  271. wmglobalqueue-2.3.10rc10.data/data/data/couchapps/WorkQueue/lib/validate.js +27 -0
  272. wmglobalqueue-2.3.10rc10.data/data/data/couchapps/WorkQueue/lib/workqueue_utils.js +61 -0
  273. wmglobalqueue-2.3.10rc10.data/data/data/couchapps/WorkQueue/lists/elementsDetail.js +28 -0
  274. wmglobalqueue-2.3.10rc10.data/data/data/couchapps/WorkQueue/lists/filter.js +86 -0
  275. wmglobalqueue-2.3.10rc10.data/data/data/couchapps/WorkQueue/lists/stuckElements.js +38 -0
  276. wmglobalqueue-2.3.10rc10.data/data/data/couchapps/WorkQueue/lists/workRestrictions.js +153 -0
  277. wmglobalqueue-2.3.10rc10.data/data/data/couchapps/WorkQueue/lists/workflowSummary.js +28 -0
  278. wmglobalqueue-2.3.10rc10.data/data/data/couchapps/WorkQueue/rewrites.json +73 -0
  279. wmglobalqueue-2.3.10rc10.data/data/data/couchapps/WorkQueue/shows/redirect.js +23 -0
  280. wmglobalqueue-2.3.10rc10.data/data/data/couchapps/WorkQueue/shows/status.js +40 -0
  281. wmglobalqueue-2.3.10rc10.data/data/data/couchapps/WorkQueue/templates/ElementSummaryByWorkflow.html +27 -0
  282. wmglobalqueue-2.3.10rc10.data/data/data/couchapps/WorkQueue/templates/StuckElementSummary.html +26 -0
  283. wmglobalqueue-2.3.10rc10.data/data/data/couchapps/WorkQueue/templates/TaskStatus.html +23 -0
  284. wmglobalqueue-2.3.10rc10.data/data/data/couchapps/WorkQueue/templates/WorkflowSummary.html +27 -0
  285. wmglobalqueue-2.3.10rc10.data/data/data/couchapps/WorkQueue/templates/partials/workqueue-common-lib.html +2 -0
  286. wmglobalqueue-2.3.10rc10.data/data/data/couchapps/WorkQueue/templates/partials/yui-lib-remote.html +16 -0
  287. wmglobalqueue-2.3.10rc10.data/data/data/couchapps/WorkQueue/templates/partials/yui-lib.html +18 -0
  288. wmglobalqueue-2.3.10rc10.data/data/data/couchapps/WorkQueue/updates/in-place.js +50 -0
  289. wmglobalqueue-2.3.10rc10.data/data/data/couchapps/WorkQueue/validate_doc_update.js +8 -0
  290. wmglobalqueue-2.3.10rc10.data/data/data/couchapps/WorkQueue/vendor/couchapp/_attachments/jquery.couch.app.js +235 -0
  291. wmglobalqueue-2.3.10rc10.data/data/data/couchapps/WorkQueue/vendor/couchapp/_attachments/jquery.pathbinder.js +173 -0
  292. wmglobalqueue-2.3.10rc10.data/data/data/couchapps/WorkQueue/views/activeData/map.js +8 -0
  293. wmglobalqueue-2.3.10rc10.data/data/data/couchapps/WorkQueue/views/activeData/reduce.js +2 -0
  294. wmglobalqueue-2.3.10rc10.data/data/data/couchapps/WorkQueue/views/activeParentData/map.js +8 -0
  295. wmglobalqueue-2.3.10rc10.data/data/data/couchapps/WorkQueue/views/activeParentData/reduce.js +2 -0
  296. wmglobalqueue-2.3.10rc10.data/data/data/couchapps/WorkQueue/views/activePileupData/map.js +8 -0
  297. wmglobalqueue-2.3.10rc10.data/data/data/couchapps/WorkQueue/views/activePileupData/reduce.js +2 -0
  298. wmglobalqueue-2.3.10rc10.data/data/data/couchapps/WorkQueue/views/analyticsData/map.js +11 -0
  299. wmglobalqueue-2.3.10rc10.data/data/data/couchapps/WorkQueue/views/analyticsData/reduce.js +1 -0
  300. wmglobalqueue-2.3.10rc10.data/data/data/couchapps/WorkQueue/views/availableByPriority/map.js +6 -0
  301. wmglobalqueue-2.3.10rc10.data/data/data/couchapps/WorkQueue/views/conflicts/map.js +5 -0
  302. wmglobalqueue-2.3.10rc10.data/data/data/couchapps/WorkQueue/views/elements/map.js +5 -0
  303. wmglobalqueue-2.3.10rc10.data/data/data/couchapps/WorkQueue/views/elementsByData/map.js +8 -0
  304. wmglobalqueue-2.3.10rc10.data/data/data/couchapps/WorkQueue/views/elementsByParent/map.js +8 -0
  305. wmglobalqueue-2.3.10rc10.data/data/data/couchapps/WorkQueue/views/elementsByParentData/map.js +8 -0
  306. wmglobalqueue-2.3.10rc10.data/data/data/couchapps/WorkQueue/views/elementsByPileupData/map.js +8 -0
  307. wmglobalqueue-2.3.10rc10.data/data/data/couchapps/WorkQueue/views/elementsByStatus/map.js +8 -0
  308. wmglobalqueue-2.3.10rc10.data/data/data/couchapps/WorkQueue/views/elementsBySubscription/map.js +6 -0
  309. wmglobalqueue-2.3.10rc10.data/data/data/couchapps/WorkQueue/views/elementsByWorkflow/map.js +8 -0
  310. wmglobalqueue-2.3.10rc10.data/data/data/couchapps/WorkQueue/views/elementsByWorkflow/reduce.js +3 -0
  311. wmglobalqueue-2.3.10rc10.data/data/data/couchapps/WorkQueue/views/elementsDetailByWorkflowAndStatus/map.js +26 -0
  312. wmglobalqueue-2.3.10rc10.data/data/data/couchapps/WorkQueue/views/jobInjectStatusByRequest/map.js +10 -0
  313. wmglobalqueue-2.3.10rc10.data/data/data/couchapps/WorkQueue/views/jobInjectStatusByRequest/reduce.js +1 -0
  314. wmglobalqueue-2.3.10rc10.data/data/data/couchapps/WorkQueue/views/jobStatusByRequest/map.js +6 -0
  315. wmglobalqueue-2.3.10rc10.data/data/data/couchapps/WorkQueue/views/jobStatusByRequest/reduce.js +1 -0
  316. wmglobalqueue-2.3.10rc10.data/data/data/couchapps/WorkQueue/views/jobsByChildQueueAndPriority/map.js +6 -0
  317. wmglobalqueue-2.3.10rc10.data/data/data/couchapps/WorkQueue/views/jobsByChildQueueAndPriority/reduce.js +1 -0
  318. wmglobalqueue-2.3.10rc10.data/data/data/couchapps/WorkQueue/views/jobsByChildQueueAndStatus/map.js +6 -0
  319. wmglobalqueue-2.3.10rc10.data/data/data/couchapps/WorkQueue/views/jobsByChildQueueAndStatus/reduce.js +1 -0
  320. wmglobalqueue-2.3.10rc10.data/data/data/couchapps/WorkQueue/views/jobsByRequest/map.js +6 -0
  321. wmglobalqueue-2.3.10rc10.data/data/data/couchapps/WorkQueue/views/jobsByRequest/reduce.js +1 -0
  322. wmglobalqueue-2.3.10rc10.data/data/data/couchapps/WorkQueue/views/jobsByStatus/map.js +6 -0
  323. wmglobalqueue-2.3.10rc10.data/data/data/couchapps/WorkQueue/views/jobsByStatus/reduce.js +1 -0
  324. wmglobalqueue-2.3.10rc10.data/data/data/couchapps/WorkQueue/views/jobsByStatusAndPriority/map.js +6 -0
  325. wmglobalqueue-2.3.10rc10.data/data/data/couchapps/WorkQueue/views/jobsByStatusAndPriority/reduce.js +1 -0
  326. wmglobalqueue-2.3.10rc10.data/data/data/couchapps/WorkQueue/views/openRequests/map.js +6 -0
  327. wmglobalqueue-2.3.10rc10.data/data/data/couchapps/WorkQueue/views/recent-items/map.js +5 -0
  328. wmglobalqueue-2.3.10rc10.data/data/data/couchapps/WorkQueue/views/siteWhitelistByRequest/map.js +6 -0
  329. wmglobalqueue-2.3.10rc10.data/data/data/couchapps/WorkQueue/views/siteWhitelistByRequest/reduce.js +1 -0
  330. wmglobalqueue-2.3.10rc10.data/data/data/couchapps/WorkQueue/views/specsByWorkflow/map.js +5 -0
  331. wmglobalqueue-2.3.10rc10.data/data/data/couchapps/WorkQueue/views/stuckElements/map.js +38 -0
  332. wmglobalqueue-2.3.10rc10.data/data/data/couchapps/WorkQueue/views/wmbsInjectStatusByRequest/map.js +12 -0
  333. wmglobalqueue-2.3.10rc10.data/data/data/couchapps/WorkQueue/views/wmbsInjectStatusByRequest/reduce.js +3 -0
  334. wmglobalqueue-2.3.10rc10.data/data/data/couchapps/WorkQueue/views/wmbsUrl/map.js +6 -0
  335. wmglobalqueue-2.3.10rc10.data/data/data/couchapps/WorkQueue/views/wmbsUrl/reduce.js +2 -0
  336. wmglobalqueue-2.3.10rc10.data/data/data/couchapps/WorkQueue/views/wmbsUrlByRequest/map.js +6 -0
  337. wmglobalqueue-2.3.10rc10.data/data/data/couchapps/WorkQueue/views/wmbsUrlByRequest/reduce.js +2 -0
  338. wmglobalqueue-2.3.10rc10.data/data/data/couchapps/WorkQueue/views/workflowSummary/map.js +9 -0
  339. wmglobalqueue-2.3.10rc10.data/data/data/couchapps/WorkQueue/views/workflowSummary/reduce.js +10 -0
  340. wmglobalqueue-2.3.10rc10.dist-info/METADATA +26 -0
  341. wmglobalqueue-2.3.10rc10.dist-info/RECORD +345 -0
  342. wmglobalqueue-2.3.10rc10.dist-info/WHEEL +5 -0
  343. wmglobalqueue-2.3.10rc10.dist-info/licenses/LICENSE +202 -0
  344. wmglobalqueue-2.3.10rc10.dist-info/licenses/NOTICE +16 -0
  345. wmglobalqueue-2.3.10rc10.dist-info/top_level.txt +2 -0
WMCore/WMSpec/WMTask.py
@@ -0,0 +1,1980 @@
1
+ #!/usr/bin/env python
2
+ # pylint: disable=W0212
3
+ # W0212 (protected-access): Access to protected names of a client class.
4
+ """
5
+ _WMTask_
6
+
7
+ Object containing a set of executable Steps which form a template for a
8
+ set of jobs.
9
+
10
+ Equivalent of a WorkflowSpec in the ProdSystem.
11
+ """
12
+ import json
13
+ from builtins import map, zip, str as newstr, bytes
14
+ from future.utils import viewitems
15
+
16
+ import logging
17
+ import os.path
18
+ import time
19
+
20
+ import WMCore.WMSpec.Steps.StepFactory as StepFactory
21
+ import WMCore.WMSpec.Utilities as SpecUtils
22
+ from WMCore.Configuration import ConfigSection
23
+ from WMCore.DataStructs.LumiList import LumiList
24
+ from WMCore.DataStructs.Workflow import Workflow as DataStructsWorkflow
25
+ from WMCore.Lexicon import lfnBase
26
+ from WMCore.WMSpec.ConfigSectionTree import ConfigSectionTree, TreeHelper
27
+ from WMCore.WMSpec.Steps.BuildMaster import BuildMaster
28
+ from WMCore.WMSpec.Steps.ExecuteMaster import ExecuteMaster
29
+ from WMCore.WMSpec.Steps.Template import CoreHelper
30
+ from WMCore.WMSpec.WMStep import WMStep, WMStepHelper
31
+
32
+
33
+ def getTaskFromStep(stepRef):
34
+ """
35
+ _getTaskFromStep_
36
+
37
+ Traverse up the step tree until finding the first WMTask entry,
38
+ return it wrapped in a WMTaskHelper
39
+
40
+ """
41
+ nodeData = stepRef
42
+ if isinstance(stepRef, WMStepHelper):
43
+ nodeData = stepRef.data
44
+
45
+ taskNode = SpecUtils.findTaskAboveNode(nodeData)
46
+ if taskNode is None:
47
+ msg = "Unable to find Task containing step\n"
48
+ # TODO: Replace with real exception class
49
+ raise RuntimeError(msg)
50
+
51
+ return WMTaskHelper(taskNode)
52
+
53
+
54
+ def buildLumiMask(runs, lumis):
55
+ """
56
+ Runs are saved in the spec as a list of integers.
57
+ The lumi mask associated to each run is saved as a list of strings
58
+ where each string is in a format like '1,4,23,45'
59
+
60
+ The method converts these parameters into the corresponding lumiMask,
61
+ e.g.: runs=['3','4'], lumis=['1,4,23,45', '5,84,234,445'] => lumiMask = {'3':[[1,4],[23,45]],'4':[[5,84],[234,445]]}
62
+ """
63
+
64
+ if len(runs) != len(lumis):
65
+ raise ValueError("runs and lumis must have same length")
66
+ for lumi in lumis:
67
+ if len(lumi.split(',')) % 2:
68
+ raise ValueError("Needs an even number of lumi in each element of lumis list")
69
+
70
+ lumiLists = [list(map(list, list(zip([int(y) for y in x.split(',')][::2], [int(y) for y in x.split(',')][1::2]))))
71
+ for x
72
+ in lumis]
73
+ strRuns = [str(run) for run in runs]
74
+
75
+ lumiMask = dict(list(zip(strRuns, lumiLists)))
76
+
77
+ return lumiMask
78
+
79
+
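
For reference, a minimal usage sketch of buildLumiMask (run outside the package, with the wheel installed); the input values are the ones from the docstring example above:

    from WMCore.WMSpec.WMTask import buildLumiMask

    # Runs arrive as integers; each lumi string holds an even number of
    # boundaries that are paired up into [first, last] ranges per run.
    runs = [3, 4]
    lumis = ['1,4,23,45', '5,84,234,445']
    mask = buildLumiMask(runs, lumis)
    print(mask)  # {'3': [[1, 4], [23, 45]], '4': [[5, 84], [234, 445]]}
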
80
+ class WMTaskHelper(TreeHelper):
81
+ """
82
+ _WMTaskHelper_
83
+
84
+ Util wrapper containing tools & methods for manipulating the WMTask
85
+ data object.
86
+ """
87
+
88
+ def __init__(self, wmTask):
89
+ TreeHelper.__init__(self, wmTask)
90
+ self.startTime = None
91
+ self.endTime = None
92
+ self.monitoring = None
93
+
94
+ def addTask(self, taskName):
95
+ """
96
+ _addTask_
97
+
98
+ Add a new task as a subtask with the name provided and
99
+ return it wrapped in a TaskHelper
100
+
101
+ """
102
+ node = WMTaskHelper(WMTask(taskName))
103
+ self.addNode(node)
104
+ pName = "%s/%s" % (self.getPathName(), taskName)
105
+ node.setPathName(pName)
106
+ return node
107
+
108
+ def taskIterator(self):
109
+ """
110
+ _taskIterator_
111
+
112
+ return output of nodeIterator(self) wrapped in TaskHelper instance
113
+
114
+ """
115
+ for x in self.nodeIterator():
116
+ yield WMTaskHelper(x)
117
+
118
+ def childTaskIterator(self):
119
+ """
120
+ _childTaskIterator_
121
+
122
+ Iterate over all the first generation child tasks.
123
+ """
124
+ for x in self.firstGenNodeChildIterator():
125
+ yield WMTaskHelper(x)
126
+
127
+ def setPathName(self, pathName):
128
+ """
129
+ _setPathName_
130
+
131
+ Set the path name of the task within the workload
132
+ Used internally when adding tasks to workloads or subtasks
133
+
134
+ """
135
+ self.data.pathName = pathName
136
+
137
+ def getPathName(self):
138
+ """
139
+ _getPathName_
140
+
141
+ get the path name of this task reflecting its
142
+ structure within the workload and task tree
143
+
144
+ """
145
+ return self.data.pathName
146
+
147
+ def name(self):
148
+ """
149
+ _name_
150
+
151
+ Retrieve the name of this task.
152
+ """
153
+ return self.data._internal_name
154
+
155
+ def listPathNames(self):
156
+ """
157
+ _listPathNames_
158
+
159
+ """
160
+ for t in self.taskIterator():
161
+ yield t.getPathName()
162
+
163
+ def listNames(self):
164
+ """
165
+ _listNames_
166
+ Returns a generator with the names of all the child tasks
167
+ """
168
+ for t in self.taskIterator():
169
+ yield t.name()
170
+
171
+ def listChildNames(self):
172
+ """
173
+ _listChildNames_
174
+ Return a list with the names of the first generation child tasks
175
+ """
176
+ names = []
177
+ for t in self.childTaskIterator():
178
+ names.append(t.name())
179
+ return names
180
+
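
As an illustration of the task-tree helpers above, a short hedged sketch (task names are made up; it assumes WMTask and WMTaskHelper can be instantiated directly, exactly as addTask itself does):

    from WMCore.WMSpec.WMTask import WMTask, WMTaskHelper

    parent = WMTaskHelper(WMTask("Production"))   # hypothetical top-level task
    parent.setPathName("/Request/Production")
    child = parent.addTask("Merge")               # becomes a first-generation child
    print(child.getPathName())                    # '/Request/Production/Merge'
    print(parent.listChildNames())                # ['Merge']
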
181
+ def makeWorkflow(self):
182
+ """
183
+ _makeWorkflow_
184
+
185
+ Create a WMBS compatible Workflow structure that represents this
186
+ task and the information contained within it
187
+
188
+ """
189
+ workflow = DataStructsWorkflow()
190
+ workflow.task = self.getPathName()
191
+ return workflow
192
+
193
+ def steps(self):
194
+ """get WMStep structure"""
195
+ if self.data.steps.topStepName is None:
196
+ return None
197
+ step = getattr(self.data.steps, self.data.steps.topStepName, None)
198
+ return WMStepHelper(step)
199
+
200
+ def getTopStepName(self):
201
+ """
202
+ _getTopStepName_
203
+
204
+ Retrieve the name of the top step.
205
+ """
206
+ return self.data.steps.topStepName
207
+
208
+ def setStep(self, wmStep):
209
+ """set topStep to be the step instance provided"""
210
+ stepData = wmStep
211
+ if isinstance(wmStep, WMStepHelper):
212
+ stepData = wmStep.data
213
+ stepHelper = wmStep
214
+ else:
215
+ stepHelper = WMStepHelper(wmStep)
216
+
217
+ stepName = stepHelper.name()
218
+ stepHelper.setTopOfTree()
219
+ setattr(self.data.steps, stepName, stepData)
220
+ setattr(self.data.steps, "topStepName", stepName)
221
+ return
222
+
223
+ def listAllStepNames(self, cmsRunOnly=False):
224
+ """
225
+ _listAllStepNames_
226
+
227
+ Get a list of all the step names contained in this task.
228
+ """
229
+ step = self.steps()
230
+ if step:
231
+ stepNames = step.allNodeNames()
232
+ if cmsRunOnly:
233
+ stepNames = [step for step in stepNames if step.startswith("cmsRun")]
234
+ return stepNames
235
+ else:
236
+ return []
237
+
238
+ def getStep(self, stepName):
239
+ """get a particular step from the workflow"""
240
+ if self.data.steps.topStepName is None:
241
+ return None
242
+ topStep = self.steps()
243
+ return topStep.getStep(stepName)
244
+
245
+ def makeStep(self, stepName):
246
+ """
247
+ _makeStep_
248
+
249
+ create a new WMStep instance, install it as the top step and
250
+ return the reference to the new step wrapped in a StepHelper
251
+
252
+ """
253
+ newStep = WMStep(stepName)
254
+ self.setStep(newStep)
255
+ return WMStepHelper(newStep)
256
+
257
+ def applyTemplates(self):
258
+ """
259
+ _applyTemplates_
260
+
261
+ For each step, load the appropriate template and install the default structure
262
+
263
+ TODO: Exception handling
264
+
265
+ """
266
+ for step in self.steps().nodeIterator():
267
+ stepType = step.stepType
268
+ template = StepFactory.getStepTemplate(stepType)
269
+ template(step)
270
+
271
+ def getStepHelper(self, stepName):
272
+ """
273
+ _getStepHelper_
274
+
275
+ Get the named step, look up its type specific helper and retrieve
276
+ the step wrapped in the type based helper.
277
+
278
+ """
279
+ step = self.getStep(stepName)
280
+ stepType = step.stepType()
281
+ template = StepFactory.getStepTemplate(stepType)
282
+ helper = template.helper(step.data)
283
+ return helper
284
+
285
+ def getOutputModulesForTask(self, cmsRunOnly=False):
286
+ """
287
+ _getOutputModulesForTask_
288
+
289
+ Retrieve all the output modules in the given task.
290
+ If cmsRunOnly is set to True, then return the output modules for
291
+ cmsRun steps only.
292
+ """
293
+ outputModules = []
294
+ for stepName in self.listAllStepNames(cmsRunOnly):
295
+ outputModules.append(self.getOutputModulesForStep(stepName))
296
+ return outputModules
297
+
298
+ def getIgnoredOutputModulesForTask(self):
299
+ """
300
+ _getIgnoredOutputModulesForTask_
301
+
302
+ Retrieve the ignored output modules in the given task.
303
+ """
304
+ ignoredOutputModules = []
305
+ for stepName in self.listAllStepNames():
306
+ stepHelper = self.getStepHelper(stepName)
307
+ ignoredOutputModules.extend(stepHelper.getIgnoredOutputModules())
308
+ return ignoredOutputModules
309
+
310
+ def getOutputModulesForStep(self, stepName):
311
+ """
312
+ _getOutputModulesForStep_
313
+
314
+ Retrieve all the output modules for the particular step.
315
+ """
316
+ step = self.getStep(stepName)
317
+
318
+ if hasattr(step.data, "output"):
319
+ if hasattr(step.data.output, "modules"):
320
+ return step.data.output.modules
321
+
322
+ return ConfigSection()
323
+
324
+ def build(self, workingDir):
325
+ """
326
+ _build_
327
+
328
+ Invoke the build process to create the job in the working dir provided
329
+
330
+ """
331
+ master = BuildMaster(workingDir)
332
+ master(self)
333
+ return
334
+
335
+ def addEnvironmentVariables(self, envDict):
336
+ """
337
+ _addEnvironmentVariables_
338
+
339
+ add a key = value style setting to the environment for this task and all
340
+ its children
341
+ """
342
+ for key, value in viewitems(envDict):
343
+ setattr(self.data.environment, key, value)
344
+ for task in self.childTaskIterator():
345
+ task.addEnvironmentVariables(envDict)
346
+ return
347
+
348
+ def setOverrideCatalog(self, tfcFile):
349
+ """
350
+ _setOverrideCatalog_
351
+
352
+ Used for setting overrideCatalog option for each step in the task.
353
+ """
354
+ for step in self.steps().nodeIterator():
355
+ step = CoreHelper(step)
356
+ step.setOverrideCatalog(tfcFile)
357
+ for task in self.childTaskIterator():
358
+ task.setOverrideCatalog(tfcFile)
359
+ return
360
+
361
+ def getEnvironmentVariables(self):
362
+ """
363
+ _getEnvironmentVariables_
364
+
365
+ Retrieve a dictionary with all environment variables defined for this task
366
+ """
367
+ return self.data.environment.dictionary_()
368
+
369
+ def setupEnvironment(self):
370
+ """
371
+ _setupEnvironment_
372
+
373
+ I don't know if this should go here.
374
+ Setup the environment variables mandated in the WMTask
375
+ """
376
+
377
+ if not hasattr(self.data, 'environment'):
378
+ # No environment to setup, pass
379
+ return
380
+
381
+ envDict = self.data.environment.dictionary_()
382
+
383
+ for key in envDict:
384
+ if str(envDict[key].__class__) == "<class 'WMCore.Configuration.ConfigSection'>":
385
+ # At this point we do not support the
386
+ # setting of sub-sections for environment variables
387
+ continue
388
+ else:
389
+ os.environ[key] = envDict[key]
390
+
391
+ return
392
+
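
To illustrate the environment helpers, a hedged sketch (the variable name and value are placeholders; it assumes the WMTask constructor provides an empty 'environment' section, as the rest of WMCore expects):

    import os
    from WMCore.WMSpec.WMTask import WMTask, WMTaskHelper

    task = WMTaskHelper(WMTask("DataProcessing"))        # hypothetical task
    task.addEnvironmentVariables({"MY_SITE_CONFIG": "/etc/site.cfg"})
    task.setupEnvironment()                              # plain values are copied into os.environ
    print(os.environ.get("MY_SITE_CONFIG"))              # '/etc/site.cfg'
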
393
+ def execute(self, wmbsJob):
394
+ """
395
+ _execute_
396
+
397
+ Invoke execution of the steps
398
+
399
+ """
400
+ self.startTime = time.time()
401
+ self.setupEnvironment()
402
+ master = ExecuteMaster()
403
+ master(self, wmbsJob)
404
+ self.endTime = time.time()
405
+ return
406
+
407
+ def setInputReference(self, stepRef, **extras):
408
+ """
409
+ _setInputReference_
410
+
411
+ Add details to the input reference for the task providing
412
+ input to this task.
413
+ The reference is the step in the input task, plus
414
+ any extra information.
415
+
416
+
417
+ """
418
+ stepId = SpecUtils.stepIdentifier(stepRef)
419
+ setattr(self.data.input, "inputStep", stepId)
420
+ for key, val in viewitems(extras):
421
+ setattr(self.data.input, key, val)
422
+
423
+ return
424
+
425
+ def setInputStep(self, stepName):
426
+ """
427
+ _setInputStep_
428
+
429
+ Set the name of the step whose output is used as input for this
430
+ task.
431
+ """
432
+ self.data.input.inputStep = stepName
433
+ return
434
+
435
+ def getInputStep(self):
436
+ """
437
+ _getInputStep_
438
+
439
+ Retrieve the name of the input step, if there is one.
440
+ """
441
+ return getattr(self.data.input, "inputStep", None)
442
+
443
+ def inputReference(self):
444
+ """
445
+ _inputReference_
446
+
447
+ Get information about the input reference for this task.
448
+
449
+ """
450
+ return self.data.input
451
+
452
+ def setFirstEventAndLumi(self, firstEvent, firstLumi):
453
+ """
454
+ _setFirstEventAndLumi_
455
+
456
+ Set an arbitrary first event and first lumi
457
+ Only used by production workflows
458
+ """
459
+
460
+ if not hasattr(self.data, "production"):
461
+ self.data._section("production")
462
+ setattr(self.data.production, "firstEvent", firstEvent)
463
+ setattr(self.data.production, "firstLumi", firstLumi)
464
+
465
+ def getFirstEvent(self):
466
+ """
467
+ _getFirstEvent_
468
+
469
+ Get first event to produce for the task
470
+ """
471
+ if hasattr(self.data, "production"):
472
+ if hasattr(self.data.production, "firstLumi"):
473
+ return self.data.production.firstEvent
474
+ return 1
475
+
476
+ def getFirstLumi(self):
477
+ """
478
+ _getFirstLumi_
479
+
480
+ Get first lumi to produce for the task
481
+ """
482
+ if hasattr(self.data, "production"):
483
+ if hasattr(self.data.production, "firstLumi"):
484
+ return self.data.production.firstLumi
485
+ return 1
486
+
487
+ def setSplittingParameters(self, **params):
488
+ """
489
+ _setSplittingParameters_
490
+
491
+ Set the job splitting parameters.
492
+ """
493
+ for key, val in viewitems(params):
494
+ setattr(self.data.input.splitting, key, val)
495
+
496
+ return
497
+
498
+ def setSplittingAlgorithm(self, algoName, **params):
499
+ """
500
+ _setSplittingAlgorithm_
501
+
502
+ Set the splitting algorithm name and arguments. Clear out any old
503
+ splitting parameters while preserving the parameters for ACDC
504
+ resubmission which are:
505
+ collectionName, filesetName, couchURL, couchDB, owner, group
506
+
507
+ This also needs to preserve the parameter we use to set the initial
508
+ LFN counter, whether or not we merge across runs and the runWhitelist:
509
+ initial_lfn_counter
510
+ merge_across_runs
511
+ runWhitelist
512
+
513
+ Preserve parameters which can be set up at request creation and if not
514
+ specified should remain unchanged, at the moment these are:
515
+ include_parents
516
+ lheInputFiles
517
+
518
+ Also preserve the performance section.
519
+ """
520
+ setACDCParams = {}
521
+ for paramName in ["collectionName", "filesetName", "couchURL",
522
+ "couchDB", "owner", "group", "initial_lfn_counter",
523
+ "merge_across_runs", "runWhitelist"]:
524
+ if hasattr(self.data.input.splitting, paramName):
525
+ setACDCParams[paramName] = getattr(self.data.input.splitting,
526
+ paramName)
527
+ preservedParams = {}
528
+ for paramName in ["lheInputFiles", "include_parents", "deterministicPileup"]:
529
+ if hasattr(self.data.input.splitting, paramName):
530
+ preservedParams[paramName] = getattr(self.data.input.splitting,
531
+ paramName)
532
+ performanceConfig = getattr(self.data.input.splitting, "performance", None)
533
+
534
+ delattr(self.data.input, "splitting")
535
+ self.data.input.section_("splitting")
536
+ self.data.input.splitting.section_("performance")
537
+
538
+ setattr(self.data.input.splitting, "algorithm", algoName)
539
+ self.setSplittingParameters(**preservedParams)
540
+ self.setSplittingParameters(**params)
541
+ self.setSplittingParameters(**setACDCParams)
542
+ if performanceConfig is not None:
543
+ self.data.input.splitting.performance = performanceConfig
544
+ return
545
+
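
A hedged sketch of configuring and reading back job splitting (the algorithm and parameter values are illustrative; it assumes the WMTask constructor initializes the input.splitting and constraints.sites sections, as elsewhere in WMCore):

    from WMCore.WMSpec.WMTask import WMTask, WMTaskHelper

    task = WMTaskHelper(WMTask("Production"))    # hypothetical task
    # Replaces any previous splitting setup; the ACDC and preserved
    # parameters listed in the docstring above would survive this call.
    task.setSplittingAlgorithm("EventBased", events_per_job=500)
    print(task.jobSplittingAlgorithm())                        # 'EventBased'
    params = task.jobSplittingParameters(performance=False)
    print(params["events_per_job"], params["siteWhitelist"])   # 500 []
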
546
+ def updateSplittingParameters(self, algoName, **params):
547
+ """
548
+ _updateSplittingParameters_
549
+ :param algoName: string Algorithm name
550
+ :param params: splitting parameters
551
+ :return:
552
+
553
+ Only updates specific parameters in splitting Algorithm but doesn't remove the existing splitting parameters
554
+ """
555
+ performanceConfig = getattr(self.data.input.splitting, "performance", None)
556
+ setattr(self.data.input.splitting, "algorithm", algoName)
557
+ self.data.input.splitting.section_("performance")
558
+ self.setSplittingParameters(**params)
559
+ if performanceConfig is not None:
560
+ self.data.input.splitting.performance = performanceConfig
561
+ return
562
+
563
+ def jobSplittingAlgorithm(self):
564
+ """
565
+ _jobSplittingAlgorithm_
566
+
567
+ Retrieve the job splitting algorithm name.
568
+ """
569
+ return getattr(self.data.input.splitting, "algorithm", None)
570
+
571
+ def jobSplittingParameters(self, performance=True):
572
+ """
573
+ _jobSplittingParameters_
574
+
575
+ Retrieve the job splitting parameters. This will combine the job
576
+ splitting parameters specified in the spec with the site white list
577
+ and black list as those are passed to the job splitting code.
578
+ If required, also extract the performance parameters and pass them in the dict.
579
+ """
580
+ datadict = getattr(self.data.input, "splitting")
581
+ if performance:
582
+ splittingParams = datadict.dictionary_whole_tree_()
583
+ else:
584
+ splittingParams = datadict.dictionary_()
585
+ if "performance" in splittingParams:
586
+ del splittingParams['performance']
587
+ splittingParams["siteWhitelist"] = self.siteWhitelist()
588
+ splittingParams["siteBlacklist"] = self.siteBlacklist()
589
+ splittingParams["trustSitelists"] = self.getTrustSitelists().get('trustlists')
590
+ splittingParams["trustPUSitelists"] = self.getTrustSitelists().get('trustPUlists')
591
+
592
+ if "runWhitelist" not in splittingParams and self.inputRunWhitelist() is not None:
593
+ splittingParams["runWhitelist"] = self.inputRunWhitelist()
594
+ if "runBlacklist" not in splittingParams and self.inputRunBlacklist() is not None:
595
+ splittingParams["runBlacklist"] = self.inputRunBlacklist()
596
+
597
+ return splittingParams
598
+
599
+ def setJobResourceInformation(self, timePerEvent=None, sizePerEvent=None, memoryReq=None):
600
+ """
601
+ _setJobResourceInformation_
602
+
603
+ Set the values to estimate the required computing resources for a job,
604
+ the three key values are main memory usage, time per processing unit (e.g. time per event) and
605
+ disk usage per processing unit (e.g. size per event).
606
+ """
607
+ if self.taskType() in ["Merge", "Cleanup", "LogCollect"]:
608
+ # don't touch job requirements for these task types
609
+ return
610
+
611
+ performanceParams = getattr(self.data.input.splitting, "performance")
612
+
613
+ timePerEvent = timePerEvent.get(self.name()) if isinstance(timePerEvent, dict) else timePerEvent
614
+ sizePerEvent = sizePerEvent.get(self.name()) if isinstance(sizePerEvent, dict) else sizePerEvent
615
+ memoryReq = memoryReq.get(self.name()) if isinstance(memoryReq, dict) else memoryReq
616
+
617
+ if timePerEvent or getattr(performanceParams, "timePerEvent", None):
618
+ performanceParams.timePerEvent = timePerEvent or getattr(performanceParams, "timePerEvent")
619
+ if sizePerEvent or getattr(performanceParams, "sizePerEvent", None):
620
+ performanceParams.sizePerEvent = sizePerEvent or getattr(performanceParams, "sizePerEvent")
621
+ if memoryReq or getattr(performanceParams, "memoryRequirement", None):
622
+ performanceParams.memoryRequirement = memoryReq or getattr(performanceParams, "memoryRequirement")
623
+ # if we change memory requirements, then we must change MaxPSS as well
624
+ self.setMaxPSS(performanceParams.memoryRequirement)
625
+
626
+ return
627
+
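
An illustrative, hedged call (all numbers are placeholders; it assumes a processing-type task with the splitting 'performance' section in place, since Merge/Cleanup/LogCollect tasks are skipped):

    from WMCore.WMSpec.WMTask import WMTask, WMTaskHelper

    task = WMTaskHelper(WMTask("TaskA"))          # hypothetical processing task
    task.setTaskType("Processing")
    task.setJobResourceInformation(timePerEvent=12.5, sizePerEvent=1200, memoryReq=2300)
    # Per-task dictionaries keyed by the task name are accepted as well;
    # changing the memory requirement also updates MaxPSS via setMaxPSS().
    task.setJobResourceInformation(memoryReq={"TaskA": 2600})
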
628
+ def addGenerator(self, generatorName, **settings):
629
+ """
630
+ _addGenerator_
631
+
632
+
633
+ """
634
+ if 'generators' not in self.data.listSections_():
635
+ self.data.section_('generators')
636
+ if generatorName not in self.data.generators.listSections_():
637
+ self.data.generators.section_(generatorName)
638
+
639
+ helper = TreeHelper(getattr(self.data.generators, generatorName))
640
+ helper.addValue(settings)
641
+
642
+ return
643
+
644
+ def listGenerators(self):
645
+ """
646
+ _listGenerators_
647
+
648
+ """
649
+ generators = getattr(self.data, "generators", None)
650
+ if generators is None:
651
+ return []
652
+ return generators.listSections_()
653
+
654
+ def getGeneratorSettings(self, generatorName):
655
+ """
656
+ _getGeneratorSettings_
657
+
658
+ Extract the settings from the generator fields
659
+ """
660
+ generators = getattr(self.data, "generators", None)
661
+ if generators is None:
662
+ return {}
663
+ generator = getattr(generators, generatorName, None)
664
+ if generator is None:
665
+ return {}
666
+
667
+ confValues = TreeHelper(generator)
668
+ args = {}
669
+ tempArgs = confValues.pythoniseDict(sections=False)
670
+ for entry in tempArgs:
671
+ args[entry.split('%s.' % generatorName)[1]] = tempArgs[entry]
672
+ return args
673
+
674
+ def addInputACDC(self, serverUrl, databaseName, collectionName,
675
+ filesetName):
676
+ """
677
+ _addInputACDC_
678
+
679
+ Set the ACDC input information for this task.
680
+ """
681
+ self.data.input.section_("acdc")
682
+ self.data.input.acdc.server = serverUrl
683
+ self.data.input.acdc.database = databaseName
684
+ self.data.input.acdc.collection = collectionName
685
+ self.data.input.acdc.fileset = filesetName
686
+ return
687
+
688
+ def getInputACDC(self):
689
+ """
690
+ _getInputACDC_
691
+
692
+ Retrieve the ACDC input configuration.
693
+ """
694
+ if not hasattr(self.data.input, "acdc"):
695
+ return None
696
+
697
+ return {"server": self.data.input.acdc.server,
698
+ "collection": self.data.input.acdc.collection,
699
+ "fileset": self.data.input.acdc.fileset,
700
+ "database": self.data.input.acdc.database}
701
+
702
+ def addInputDataset(self, **options):
703
+ """
704
+ _addInputDataset_
705
+
706
+ Add details of an input dataset to this Task.
707
+ This dataset will be used as input for the first step
708
+ in the task
709
+
710
+ options should contain at least:
711
+ - name - dataset name
712
+ - primary - primary dataset name
713
+ - processed - processed dataset name
714
+ - tier - data tier name
715
+
716
+ optional args:
717
+ - dbsurl - dbs url if not global
718
+ - block_whitelist - list of whitelisted fileblocks
719
+ - block_blacklist - list of blacklisted fileblocks
720
+ - run_whitelist - list of whitelist runs
721
+ - run_blacklist - list of blacklist runs
722
+ """
723
+ self.data.input.section_("dataset")
724
+ self.data.input.dataset.name = None
725
+ self.data.input.dataset.dbsurl = None
726
+ self.data.input.dataset.section_("blocks")
727
+ self.data.input.dataset.blocks.whitelist = []
728
+ self.data.input.dataset.blocks.blacklist = []
729
+ self.data.input.dataset.section_("runs")
730
+ self.data.input.dataset.runs.whitelist = []
731
+ self.data.input.dataset.runs.blacklist = []
732
+
733
+ try:
734
+ self.data.input.dataset.primary = options.pop('primary')
735
+ self.data.input.dataset.processed = options.pop('processed')
736
+ self.data.input.dataset.tier = options.pop('tier')
737
+ except KeyError:
738
+ raise RuntimeError("Primary, Processed and Tier must be set")
739
+
740
+ for opt, arg in viewitems(options):
741
+ if opt == 'block_blacklist':
742
+ self.setInputBlockBlacklist(arg)
743
+ elif opt == 'block_whitelist':
744
+ self.setInputBlockWhitelist(arg)
745
+ elif opt == 'dbsurl':
746
+ self.data.input.dataset.dbsurl = arg
747
+ elif opt == "run_whitelist":
748
+ self.setInputRunWhitelist(arg)
749
+ elif opt == "run_blacklist":
750
+ self.setInputRunBlacklist(arg)
751
+ else:
752
+ setattr(self.data.input.dataset, opt, arg)
753
+
754
+ return
755
+
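
A hedged example of attaching an input dataset (the dataset name, DBS URL and run numbers are placeholders; it assumes the WMTask constructor creates the default input section):

    from WMCore.WMSpec.WMTask import WMTask, WMTaskHelper

    task = WMTaskHelper(WMTask("DataProcessing"))              # hypothetical task
    task.addInputDataset(name='/Cosmics/Run2023A-v1/RAW',      # placeholder dataset path
                         primary='Cosmics',
                         processed='Run2023A-v1',
                         tier='RAW',
                         dbsurl='https://dbs.example.org/reader',   # placeholder URL
                         run_whitelist=[100001, 100002])
    print(task.getInputDatasetPath())   # '/Cosmics/Run2023A-v1/RAW'
    print(task.inputRunWhitelist())     # [100001, 100002]
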
756
+ def setInputBlockWhitelist(self, blockWhitelist):
757
+ """
758
+ _setInputBlockWhitelist_
759
+
760
+ Set the block white list for the input dataset. This must be called
761
+ after setInputDataset().
762
+ """
763
+ self.data.input.dataset.blocks.whitelist = blockWhitelist
764
+ return
765
+
766
+ def inputBlockWhitelist(self):
767
+ """
768
+ _inputBlockWhitelist_
769
+
770
+ Retrieve the block white list for the input dataset if it exists, none
771
+ otherwise.
772
+ """
773
+ if hasattr(self.data.input, "dataset"):
774
+ return self.data.input.dataset.blocks.whitelist
775
+ return None
776
+
777
+ def setInputBlockBlacklist(self, blockBlacklist):
778
+ """
779
+ _setInputBlockBlacklist_
780
+
781
+ Set the block black list for the input dataset. This must be called
782
+ after setInputDataset().
783
+ """
784
+ self.data.input.dataset.blocks.blacklist = blockBlacklist
785
+ return
786
+
787
+ def inputBlockBlacklist(self):
788
+ """
789
+ _inputBlockBlacklist_
790
+
791
+ Retrieve the block black list for the input dataset if it exists, none
792
+ otherwise.
793
+ """
794
+ if hasattr(self.data.input, "dataset"):
795
+ return self.data.input.dataset.blocks.blacklist
796
+ return None
797
+
798
+ def setInputRunWhitelist(self, runWhitelist):
799
+ """
800
+ _setInputRunWhitelist_
801
+
802
+ Set the run white list for the input dataset. This must be called
803
+ after setInputDataset().
804
+ """
805
+ self.data.input.dataset.runs.whitelist = runWhitelist
806
+ return
807
+
808
+ def inputRunWhitelist(self):
809
+ """
810
+ _inputRunWhitelist_
811
+
812
+ Retrieve the run white list for the input dataset if it exists, none
813
+ otherwise.
814
+ """
815
+ if hasattr(self.data.input, "dataset"):
816
+ return self.data.input.dataset.runs.whitelist
817
+ return None
818
+
819
+ def setInputRunBlacklist(self, runBlacklist):
820
+ """
821
+ _setInputRunBlacklist_
822
+
823
+ Set the run black list for the input dataset. This must be called
824
+ after setInputDataset().
825
+ """
826
+ self.data.input.dataset.runs.blacklist = runBlacklist
827
+ return
828
+
829
+ def inputRunBlacklist(self):
830
+ """
831
+ _inputRunBlacklist_
832
+
833
+ Retrieve the run black list for the input dataset if it exists, none
834
+ otherwise.
835
+ """
836
+ if hasattr(self.data.input, "dataset"):
837
+ return self.data.input.dataset.runs.blacklist
838
+ return None
839
+
840
+ def addProduction(self, **options):
841
+ """
842
+ _addProduction_
843
+
844
+ Add details of production job related information.
845
+
846
+ options should contain at least:
847
+ TODO: Not sure what data is necessary; ask Dave
848
+ optional
849
+ - totalevents - total events in dataset
850
+
851
+ """
852
+ if not hasattr(self.data, "production"):
853
+ self.data.section_("production")
854
+
855
+ for opt, arg in viewitems(options):
856
+ setattr(self.data.production, opt, arg)
857
+
858
+ def inputDataset(self):
859
+ """
860
+ _inputDataset_
861
+
862
+ Get the input.dataset structure from this task
863
+
864
+ """
865
+ return getattr(self.data.input, "dataset", None)
866
+
867
+ def getInputDatasetPath(self):
868
+ """
869
+ _getInputDatasetPath_
870
+
871
+ Get the input dataset path because it's useful
872
+ """
873
+
874
+ if hasattr(self.data.input, 'dataset'):
875
+ return getattr(self.data.input.dataset, 'name', None)
876
+
877
+ return None
878
+
879
+ def setInputPileupDatasets(self, dsetName):
880
+ """
881
+ _setInputPileupDatasets_
882
+
883
+ Create a list of pileup datasets to be used by this task (possibly
884
+ multiple CMSSW steps)
885
+ """
886
+ self.data.input.section_("pileup")
887
+ if not hasattr(self.data.input.pileup, "datasets"):
888
+ self.data.input.pileup.datasets = []
889
+
890
+ if isinstance(dsetName, list):
891
+ self.data.input.pileup.datasets.extend(dsetName)
892
+ elif isinstance(dsetName, (newstr, bytes)):
893
+ self.data.input.pileup.datasets.append(dsetName)
894
+ else:
895
+ raise ValueError("Pileup dataset must be either a list or a string (unicode or bytes)")
896
+ # make the list unique
897
+ self.data.input.pileup.datasets = list(set(self.data.input.pileup.datasets))
898
+
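
A short hedged sketch of the pileup setter (dataset names are placeholders; it assumes the default input section exists); both a single string and a list are accepted, and the stored list is de-duplicated:

    from WMCore.WMSpec.WMTask import WMTask, WMTaskHelper

    task = WMTaskHelper(WMTask("GenSim"))                          # hypothetical task
    task.setInputPileupDatasets('/MinBias/PileUp-v1/PREMIX')       # single dataset
    task.setInputPileupDatasets(['/MinBias/PileUp-v1/PREMIX',
                                 '/Neutrino/PileUp-v2/GEN-SIM'])   # list, duplicates dropped
    print(sorted(task.getInputPileupDatasets()))
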
899
+ def getInputPileupDatasets(self):
900
+ """
901
+ _getInputPileupDatasets_
902
+
903
+ Get a list of the input pileup dataset name(s) for this task.
904
+ """
905
+ if hasattr(self.data.input, 'pileup'):
906
+ return getattr(self.data.input.pileup, 'datasets', [])
907
+ return []
908
+
909
+ def siteWhitelist(self):
910
+ """
911
+ _siteWhitelist_
912
+
913
+ Accessor for the site white list for the task.
914
+ """
915
+ return self.data.constraints.sites.whitelist
916
+
917
+ def setSiteWhitelist(self, siteWhitelist):
918
+ """
919
+ _setSiteWhitelist_
920
+
921
+ Set the site white list for the task.
922
+ """
923
+ self.data.constraints.sites.whitelist = siteWhitelist
924
+ return
925
+
926
+ def siteBlacklist(self):
927
+ """
928
+ _siteBlacklist_
929
+
930
+ Accessor for the site black list for the task.
931
+ """
932
+ return self.data.constraints.sites.blacklist
933
+
934
+ def setSiteBlacklist(self, siteBlacklist):
935
+ """
936
+ _setSiteBlacklist_
937
+
938
+ Set the site black list for the task.
939
+ """
940
+ self.data.constraints.sites.blacklist = siteBlacklist
941
+ return
942
+
943
+ def getTrustSitelists(self):
944
+ """
945
+ _getTrustSitelists_
946
+
947
+ Get the input and pileup flag for 'trust site lists' in the task.
948
+ """
949
+ # handle backward compatibility for the request which doesn't contain trustPUlists
950
+ return {'trustlists': getattr(self.data.constraints.sites, 'trustlists', False),
951
+ 'trustPUlists': getattr(self.data.constraints.sites, 'trustPUlists', False)}
952
+
953
+ def setTrustSitelists(self, trustSitelists, trustPUSitelists):
954
+ """
955
+ _setTrustSitelists_
956
+
957
+ Set the input and the pileup flags for 'trust site lists' in the task.
958
+ """
959
+ self.data.constraints.sites.trustlists = trustSitelists
960
+ self.data.constraints.sites.trustPUlists = trustPUSitelists
961
+ return
962
+
963
+ def listOutputDatasetsAndModules(self):
964
+ """
965
+ _listOutputDatasetsAndModules_
966
+
967
+ Get the output datasets per output module for this task
968
+ """
969
+ outputDatasets = []
970
+ for stepName in self.listAllStepNames():
971
+ stepHelper = self.getStepHelper(stepName)
972
+
973
+ if not getattr(stepHelper.data.output, "keep", True):
974
+ continue
975
+
976
+ if stepHelper.stepType() == "CMSSW":
977
+ for outputModuleName in stepHelper.listOutputModules():
978
+ outputModule = stepHelper.getOutputModule(outputModuleName)
979
+ outputDataset = "/%s/%s/%s" % (outputModule.primaryDataset,
980
+ outputModule.processedDataset,
981
+ outputModule.dataTier)
982
+ outputDatasets.append({"outputModule": outputModuleName,
983
+ "outputDataset": outputDataset})
984
+
985
+ return outputDatasets
986
+
987
+ def setSubscriptionInformation(self, custodialSites=None, nonCustodialSites=None,
988
+ priority="Low", primaryDataset=None,
989
+ useSkim=False, isSkim=False,
990
+ dataTier=None, deleteFromSource=False,
991
+ datasetLifetime=None):
992
+ """
993
+ _setSubscriptionInformation_
994
+
995
+ Set the subscription information for this task's datasets
996
+ The subscriptions information is structured as follows:
997
+ data.subscriptions.outputSubs is a list with the output section names (1 per dataset)
998
+ data.subscriptions.<outputSection>.dataset
999
+ data.subscriptions.<outputSection>.outputModule
1000
+ data.subscriptions.<outputSection>.custodialSites
1001
+ data.subscriptions.<outputSection>.nonCustodialSites
1002
+ data.subscriptions.<outputSection>.priority
1003
+
1004
+ The filter arguments allow defining a dataTier and primaryDataset. Only datasets
1005
+ matching those values will be configured.
1006
+ """
1007
+ custodialSites = custodialSites or []
1008
+ nonCustodialSites = nonCustodialSites or []
1009
+
1010
+ if not hasattr(self.data, "subscriptions"):
1011
+ self.data.section_("subscriptions")
1012
+ self.data.subscriptions.outputSubs = []
1013
+
1014
+ outputDatasets = self.listOutputDatasetsAndModules()
1015
+
1016
+ for entry in enumerate(outputDatasets, start=1):
1017
+ subSectionName = "output%s" % entry[0]
1018
+ outputDataset = entry[1]["outputDataset"]
1019
+ outputModule = entry[1]["outputModule"]
1020
+
1021
+ dsSplit = outputDataset.split('/')
1022
+ primDs = dsSplit[1]
1023
+ tier = dsSplit[3]
1024
+ procDsSplit = dsSplit[2].split('-')
1025
+ skim = (len(procDsSplit) == 4)
1026
+
1027
+ if primaryDataset and primDs != primaryDataset:
1028
+ continue
1029
+ if useSkim and isSkim != skim:
1030
+ continue
1031
+ if dataTier and tier != dataTier:
1032
+ continue
1033
+
1034
+ self.data.subscriptions.outputSubs.append(subSectionName)
1035
+ outputSection = self.data.subscriptions.section_(subSectionName)
1036
+ outputSection.dataset = outputDataset
1037
+ outputSection.outputModule = outputModule
1038
+ outputSection.custodialSites = custodialSites
1039
+ outputSection.nonCustodialSites = nonCustodialSites
1040
+ outputSection.priority = priority
1041
+ outputSection.deleteFromSource = deleteFromSource
1042
+ outputSection.datasetLifetime = datasetLifetime
1043
+
1044
+ return
1045
+
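
An illustrative, hedged sketch of configuring output data placement (site names, tier and priority are placeholders; it assumes 'task' is a fully built WMTaskHelper whose CMSSW steps declare output modules, otherwise nothing is subscribed). getSubscriptionInformation(), defined just below, then returns the per-dataset dictionary described there:

    # 'task' is assumed to be a fully built WMTaskHelper with CMSSW output modules.
    task.setSubscriptionInformation(custodialSites=['T1_US_FNAL_Disk'],
                                    nonCustodialSites=['T2_CH_CERN'],
                                    priority='High',
                                    dataTier='AODSIM')   # only AODSIM outputs are configured
    for dataset, rules in task.getSubscriptionInformation().items():
        print(dataset, rules['CustodialSites'], rules['Priority'])
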
1046
+ def getSubscriptionInformation(self):
1047
+ """
1048
+ _getSubscriptionInformation_
1049
+
1050
+ Get the subscription configuration for the task
1051
+ return a dictionary with the following structure
1052
+ {<dataset> : {CustodialSites : [],
1053
+ NonCustodialSites : [],
1054
+ Priority : "Low"
1055
+ }
1056
+ }
1057
+ """
1058
+ if not hasattr(self.data, "subscriptions"):
1059
+ return {}
1060
+
1061
+ subKeyName = 'outputSubs'
1062
+
1063
+ subInformation = {}
1064
+ for outputSub in getattr(self.data.subscriptions, subKeyName):
1065
+ outputSection = getattr(self.data.subscriptions, outputSub)
1066
+ dataset = outputSection.dataset
1067
+
1068
+ subInformation[dataset] = {"CustodialSites": outputSection.custodialSites,
1069
+ "NonCustodialSites": outputSection.nonCustodialSites,
1070
+ "Priority": outputSection.priority,
1071
+ # These might not be present in all specs
1072
+ "DeleteFromSource": getattr(outputSection, "deleteFromSource", False),
1073
+ # Spec assigned for T0 ContainerRules
1074
+ "DatasetLifetime": getattr(outputSection, "datasetLifetime", 0)}
1075
+ return subInformation
1076
+
1077
+ def parentProcessingFlag(self):
1078
+ """
1079
+ _parentProcessingFlag_
1080
+
1081
+ accessor for parentProcessing information (two file input)
1082
+ """
1083
+ return self.jobSplittingParameters().get("include_parents", False)
1084
+
1085
+ def totalEvents(self):
1086
+ """
1087
+ _totalEvents_
1088
+
1089
+ accessor for total events in the given dataset
1090
+ """
1091
+ # TODO: save the total events for the production job
1092
+ return int(self.data.production.totalEvents)
1093
+ # return self.data.input.dataset.totalEvents
1094
+
1095
+ def dbsUrl(self):
1096
+ """
1097
+ _dbsUrl_
1098
+ if local dbs url is set for the task, return it
1099
+ otherwise return None
1100
+ """
1101
+ if getattr(self.data.input, "dataset", False):
1102
+ return getattr(self.data.input.dataset, "dbsurl", None)
1103
+ else:
1104
+ return None
1105
+
1106
+ def setTaskType(self, taskType):
1107
+ """
1108
+ Set the type field of this task
1109
+ """
1110
+ self.data.taskType = taskType
1111
+
1112
+ def taskType(self):
1113
+ """
1114
+ _taskType_
1115
+
1116
+ Get the task Type setting
1117
+ """
1118
+ return self.data.taskType
1119
+
1120
+ def completeTask(self, jobLocation, reportName):
1121
+ """
1122
+ _completeTask_
1123
+
1124
+ Combine all the logs from all the steps in the task to a single log
1125
+
1126
+ If necessary, output to Dashboard
1127
+ """
1128
+ from WMCore.FwkJobReport.Report import Report
1129
+
1130
+ finalReport = Report()
1131
+ # We left the master report at the pilot scratch area level
1132
+ testPath = os.path.join(jobLocation, '../../', reportName)
1133
+ logging.info("Looking for master report at %s", testPath)
1134
+ if os.path.exists(testPath):
1135
+ logging.info(" found it!")
1136
+ # If a report already exists, we load it and
1137
+ # append our steps to it
1138
+ finalReport.load(testPath)
1139
+ taskSteps = self.listAllStepNames()
1140
+ for taskStep in taskSteps:
1141
+ reportPath = os.path.join(jobLocation, taskStep, "Report.pkl")
1142
+ logging.info("Looking for a taskStep report at %s", reportPath)
1143
+ if os.path.isfile(reportPath):
1144
+ logging.info(" found it!")
1145
+ stepReport = Report()
1146
+ stepReport.unpersist(reportPath, taskStep)
1147
+ finalReport.setStep(taskStep, stepReport.retrieveStep(taskStep))
1148
+ logURL = stepReport.getLogURL()
1149
+ if logURL:
1150
+ finalReport.setLogURL(logURL)
1151
+ else:
1152
+ msg = " failed to find it."
1153
+ msg += "Files in the directory are:\n%s" % os.listdir(os.path.join(jobLocation, taskStep))
1154
+ logging.error(msg)
1155
+ # Then we have a missing report
1156
+ # This should raise an alarm bell, as per Steve's request
1157
+ # TODO: Change error code
1158
+ finalReport.addStep(reportname=taskStep, status=1)
1159
+ finalReport.addError(stepName=taskStep, exitCode=99996, errorType="ReportManipulatingError",
1160
+ errorDetails="Failed to find a step report for %s!" % taskStep)
1161
+
1162
+ finalReport.data.completed = True
1163
+ finalReport.persist(reportName)
1164
+
1165
+ return finalReport
1166
+
1167
+ def taskLogBaseLFN(self):
1168
+ """
1169
+ _taskLogBaseLFN_
1170
+
1171
+ Get the base LFN for the task's log archive file.
1172
+ """
1173
+ return getattr(self.data, "logBaseLFN", "/store/temp/WMAgent/unmerged")
1174
+
1175
+ def setTaskLogBaseLFN(self, logBaseLFN):
1176
+ """
1177
+ _setTaskLogBaseLFN_
1178
+
1179
+ Set the base LFN for the task's log archive file.
1180
+ """
1181
+ self.data.logBaseLFN = logBaseLFN
1182
+ return
1183
+
1184
+ def addNotification(self, target):
1185
+ """
1186
+ _addNotification_
1187
+
1188
+ Add a target to be notified on workflow completion
1189
+ """
1190
+
1191
+ self.data.notifications.targets.append(target)
1192
+ return
1193
+
1194
+ def getNotifications(self):
1195
+ """
1196
+ _getNotifications_
1197
+
1198
+ Get all targets for notification at workflow completion
1199
+ """
1200
+
1201
+ return self.data.notifications.targets
1202
+
1203
+ def _setPerformanceMonitorConfig(self):
1204
+ """
1205
+ Check whether a config section exists for the PerformanceMonitor. If not set, it will create one.
1206
+ """
1207
+ if self.monitoring is not None:
1208
+ return
1209
+
1210
+ self.monitoring = self.data.section_("watchdog")
1211
+ if not hasattr(self.data.watchdog, 'monitors'):
1212
+ self.data.watchdog.monitors = []
1213
+ if 'PerformanceMonitor' not in self.monitoring.monitors:
1214
+ self.monitoring.monitors.append('PerformanceMonitor')
1215
+ self.monitoring.section_("PerformanceMonitor")
1216
+ return
1217
+
1218
+ def setMaxPSS(self, maxPSS):
1219
+ """
1220
+ _setMaxPSS_
1221
+
1222
+ Set MaxPSS performance monitoring for this task.
1223
+ :param maxPSS: maximum Proportional Set Size (PSS) memory consumption in MiB
1224
+ """
1225
+ if self.taskType() in ["Merge", "Cleanup", "LogCollect"]:
1226
+ # keep the default settings (from StdBase) for these task types
1227
+ return
1228
+
1229
+ if isinstance(maxPSS, dict):
1230
+ maxPSS = maxPSS.get(self.name(), None)
1231
+
1232
+ if maxPSS:
1233
+ self._setPerformanceMonitorConfig()
1234
+ self.monitoring.PerformanceMonitor.maxPSS = int(maxPSS)
1235
+ for task in self.childTaskIterator():
1236
+ task.setMaxPSS(maxPSS)
1237
+ return
1238
+
1239
+ def setPerformanceMonitor(self, softTimeout=None, gracePeriod=None):
1240
+ """
1241
+ _setPerformanceMonitor_
1242
+
1243
+ Set/Update the performance monitor options for the task
1244
+ """
1245
+ # make sure there is a PerformanceMonitor section in the task
1246
+ self._setPerformanceMonitorConfig()
1247
+
1248
+ if softTimeout:
1249
+ self.monitoring.PerformanceMonitor.softTimeout = int(softTimeout)
1250
+ if gracePeriod:
1251
+ self.monitoring.PerformanceMonitor.hardTimeout = int(softTimeout + gracePeriod)
1252
+
1253
+ return
1254
+
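+ # Worked example (illustration only, hypothetical values): calling
+ # setPerformanceMonitor(softTimeout=47220, gracePeriod=300) leaves the watchdog section with
+ #     PerformanceMonitor.softTimeout = 47220
+ #     PerformanceMonitor.hardTimeout = 47520   # int(softTimeout + gracePeriod)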
1255
+ def getSwVersion(self, allSteps=False):
1256
+ """
1257
+ _getSwVersion_
1258
+
1259
+ Get the CMSSW version for the first CMSSW step in this task.
1260
+ :param allSteps: set it to True to retrieve a list of CMSSW releases
1261
+ used in this task
1262
+ :return: a string with the release name or a list of releases if allSteps is True.
1263
+ """
1264
+ versions = []
1265
+ for stepName in self.listAllStepNames():
1266
+ stepHelper = self.getStepHelper(stepName)
1267
+ if stepHelper.stepType() in ["CMSSW", "LogCollect"]:
1268
+ if not allSteps:
1269
+ return stepHelper.getCMSSWVersion()
1270
+ else:
1271
+ versions.append(stepHelper.getCMSSWVersion())
1272
+ return versions
1273
+
1274
+ def getScramArch(self, allSteps=False):
1275
+ """
1276
+ _getScramArch_
1277
+
1278
+ Get the ScramArch for the first CMSSW step in this task.
1279
+ Set allSteps to true to retrieve all the scramArchs used in this task.
1280
+ """
1281
+ scrams = []
1282
+ for stepName in self.listAllStepNames():
1283
+ stepHelper = self.getStepHelper(stepName)
1284
+ if stepHelper.stepType() in ["CMSSW", "LogCollect"]:
1285
+ if not allSteps:
1286
+ return stepHelper.getScramArch()
1287
+ else:
1288
+ scrams.append(stepHelper.getScramArch())
1289
+ return scrams
1290
+
1291
+ def setPrimarySubType(self, subType):
1292
+ """
1293
+ _setPrimarySubType_
1294
+
1295
+ Set the subType that should be used by WorkQueue for the
1296
+ primary subscription
1297
+ """
1298
+
1299
+ self.data.parameters.primarySubType = subType
1300
+ return
1301
+
1302
+ def getPrimarySubType(self):
1303
+ """
1304
+ _getPrimarySubType_
1305
+
1306
+ Retrieve the primary subType
1307
+ If not available, use the taskType
1308
+ """
1309
+
1310
+ return getattr(self.data.parameters, 'primarySubType',
1311
+ self.taskType())
1312
+
1313
+ def getConfigCacheIDs(self):
1314
+ """
1315
+ _getConfigCacheIDs_
1316
+
1317
+ Search constituent steps for ConfigCacheID
1318
+ """
1319
+
1320
+ IDs = []
1321
+ for stepName in self.listAllStepNames():
1322
+ stepHelper = self.getStepHelper(stepName)
1323
+ ID = stepHelper.getConfigCacheID()
1324
+ if ID:
1325
+ IDs.append(ID)
1326
+ return IDs
1327
+
1328
+ def getPhysicsTaskType(self):
1329
+ """
1330
+ Return the physics Task type
1331
+ :return: str
1332
+ """
1333
+ return getattr(self.data, 'physicsTaskType', None)
1334
+
1335
+ def getStepPhysicsTypes(self):
1336
+ """
1337
+ Get the physics types for all cmsRun steps
1338
+ :return: list
1339
+ """
1340
+ types = []
1341
+ for stepName in self.listAllStepNames(cmsRunOnly=True):
1342
+ stepHelper = self.getStepHelper(stepName)
1343
+ stepType = stepHelper.getPhysicsType()
1344
+ if stepType:
1345
+ types.append(stepType)
1346
+ else:
1347
+ types.append("UNKNOWN")
1348
+ return types
1349
+
1350
+ def setPhysicsTaskType(self):
1351
+ """
1352
+ Set the physics task type based on the physics type of
1353
+ each cmsRun step in this task.
1354
+ We basically expand the standard "Production/Processing" taskType to
1355
+ detail the process better.
1356
+ For MC: "Production" or "Processing" to ["GENSIM", "GEN", "DIGI", "RECO", "DIGIRECO", "MINIAOD"]
1357
+ For Data: "Processing" to "Dataprocessing"
1358
+ :return: None; the result is stored in self.data.physicsTaskType
1359
+ """
1360
+ physicsTaskType = "UNKNOWN"
1361
+ if self.taskType() in ("Processing", "Production"):
1362
+ stepTypes = self.getStepPhysicsTypes()
1363
+ if "DataProcessing" in stepTypes:
1364
+ # For data, return a single DataProcessing step
1365
+ physicsTaskType = "DataProcessing"
1366
+ else:
1367
+ # For MC, join all physics steps
1368
+ physicsTaskType = ",".join(stepTypes)
1369
+ else:
1370
+ # Other task types are not physics types
1371
+ physicsTaskType = None
1372
+
1373
+ self.data.physicsTaskType = physicsTaskType
1374
+
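+ # Example behaviour (illustration only): a Production task whose cmsRun steps report
+ # physics types ["GEN", "SIM"] ends up with physicsTaskType = "GEN,SIM"; a Processing
+ # task containing a "DataProcessing" step gets "DataProcessing"; any other task type
+ # (e.g. Merge) gets None.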
1375
+ def setProcessingVersion(self, procVer, parentProcessingVersion=0, stepChainMap=False):
1376
+ """
1377
+ _setProcessingVersion_
1378
+
1379
+ Set the task processing version
1380
+ """
1381
+ if isinstance(procVer, dict) and stepChainMap:
1382
+ taskProcVer = self._getStepValue(procVer, parentProcessingVersion)
1383
+ self._setStepProperty("ProcessingVersion", procVer, stepChainMap)
1384
+ elif isinstance(procVer, dict):
1385
+ taskProcVer = procVer.get(self.name(), parentProcessingVersion)
1386
+ if taskProcVer is None:
1387
+ for taskname in procVer:
1388
+ if taskname in self.name():
1389
+ taskProcVer = procVer[taskname]
1390
+ else:
1391
+ taskProcVer = procVer
1392
+
1393
+ self.data.parameters.processingVersion = int(taskProcVer)
1394
+ for task in self.childTaskIterator():
1395
+ task.setProcessingVersion(procVer, taskProcVer, stepChainMap)
1396
+ return
1397
+
1398
+ def getProcessingVersion(self):
1399
+ """
1400
+ _getProcessingVersion_
1401
+
1402
+ Get the task processing version
1403
+ """
1404
+ return getattr(self.data.parameters, 'processingVersion', 0)
1405
+
1406
+ def setProcessingString(self, procString, parentProcessingString=None, stepChainMap=False):
1407
+ """
1408
+ _setProcessingString_
1409
+
1410
+ Set the task processing string
1411
+ """
1412
+ if isinstance(procString, dict) and stepChainMap:
1413
+ taskProcString = self._getStepValue(procString, parentProcessingString)
1414
+ self._setStepProperty("ProcessingString", procString, stepChainMap)
1415
+ elif isinstance(procString, dict):
1416
+ taskProcString = procString.get(self.name(), parentProcessingString)
1417
+ if taskProcString is None:
1418
+ for taskname in procString:
1419
+ if taskname in self.name():
1420
+ taskProcString = procString[taskname]
1421
+ else:
1422
+ taskProcString = procString
1423
+
1424
+ self.data.parameters.processingString = taskProcString
1425
+
1426
+ for task in self.childTaskIterator():
1427
+ task.setProcessingString(procString, taskProcString, stepChainMap)
1428
+ return
1429
+
1430
+ def getProcessingString(self):
1431
+ """
1432
+ _getProcessingString_
1433
+
1434
+ Get the task processing string
1435
+ """
1436
+ return getattr(self.data.parameters, 'processingString', None)
1437
+
1438
+ def getCMSSWVersionsWithMergeTask(self):
1439
+ """
1440
+ _getCMSSWVersionsWithMergeTask_
1441
+
1442
+ Get all the CMSSW versions for this task, plus the CMSSW versions of its first-generation merge tasks.
1443
+ This is used for validation and checks in the script.
1444
+ The merge CMSSW version should be the same as the processing CMSSW version.
1445
+ """
1446
+ versions = set()
1447
+ for stepName in self.listAllStepNames():
1448
+
1449
+ stepHelper = self.getStepHelper(stepName)
1450
+ if stepHelper.stepType() != "CMSSW":
1451
+ continue
1452
+ version = stepHelper.getCMSSWVersion()
1453
+ versions.add(version)
1454
+
1455
+ for task in self.childTaskIterator():
1456
+ if task.taskType() == "Merge":
1457
+ for stepName in task.listAllStepNames():
1458
+
1459
+ stepHelper = task.getStepHelper(stepName)
1460
+ if stepHelper.stepType() != "CMSSW":
1461
+ continue
1462
+ version = stepHelper.getCMSSWVersion()
1463
+ versions.add(version)
1464
+
1465
+ return versions
1466
+
1467
+ def setNumberOfCores(self, cores, nStreams):
1468
+ """
1469
+ _setNumberOfCores_
1470
+
1471
+ Set number of cores and event streams for each CMSSW step in this task and its children
1472
+ """
1473
+ if self.taskType() in ["Merge", "Harvesting", "Cleanup", "LogCollect"]:
1474
+ return
1475
+
1476
+ if isinstance(cores, dict):
1477
+ taskCores = cores.get(self.name())
1478
+ else:
1479
+ taskCores = cores
1480
+
1481
+ if isinstance(nStreams, dict):
1482
+ taskStreams = nStreams.get(self.name(), 0)
1483
+ else:
1484
+ taskStreams = nStreams
1485
+
1486
+ if taskCores:
1487
+ for stepName in self.listAllStepNames():
1488
+ stepHelper = self.getStepHelper(stepName)
1489
+ if stepHelper.stepType() == "CMSSW":
1490
+ stepHelper.setNumberOfCores(taskCores, taskStreams)
1491
+
1492
+ for task in self.childTaskIterator():
1493
+ task.setNumberOfCores(cores, nStreams)
1494
+
1495
+ return
1496
+
1497
+ def getNumberOfCores(self):
1498
+ """
1499
+ Retrieves the number of cores for this task.
1500
+ If it's a multi-step task, it returns only the greatest value
1501
+ :return: an integer with the number of cores required by this task
1502
+ """
1503
+ maxCores = 1
1504
+ for stepName in self.listAllStepNames():
1505
+ stepHelper = self.getStep(stepName)
1506
+ maxCores = max(maxCores, stepHelper.getNumberOfCores())
1507
+ return maxCores
1508
+
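+ # Example behaviour (illustration only): for a multi-step task whose CMSSW steps were
+ # configured with 4 and 8 cores respectively, getNumberOfCores() returns 8, i.e. the
+ # maximum over all steps (with a floor of 1).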
1509
+ def setTaskGPUSettings(self, requiresGPU, gpuParams):
1510
+ """
1511
+ Setter method for the GPU settings, applied to this Task object and
1512
+ all CMSSW-type step objects underneath it.
1513
+ :param requiresGPU: string defining whether GPUs are needed. For TaskChains, it
1514
+ could be a dictionary key'ed by the taskname.
1515
+ :param gpuParams: GPU settings. A JSON encoded object, from either a None object
1516
+ or a dictionary. For TaskChains, it could be a dictionary key'ed by the taskname
1517
+ :return: nothing, the workload spec is updated in place.
1518
+ """
1519
+ # these job types shall not have these settings
1520
+ if self.taskType() in ["Merge", "Harvesting", "Cleanup", "LogCollect"]:
1521
+ return
1522
+
1523
+ # default values come from StdBase
1524
+ if isinstance(requiresGPU, dict):
1525
+ thisTaskGPU = requiresGPU.get(self.name(), "forbidden")
1526
+ else:
1527
+ thisTaskGPU = requiresGPU
1528
+
1529
+ decodedGpuParams = json.loads(gpuParams)
1530
+ if self.name() in decodedGpuParams:
1531
+ thisTaskGPUParams = decodedGpuParams[self.name()]
1532
+ else:
1533
+ thisTaskGPUParams = decodedGpuParams
1534
+
1535
+ for stepName in self.listAllStepNames():
1536
+ stepHelper = self.getStepHelper(stepName)
1537
+ if stepHelper.stepType() == "CMSSW":
1538
+ stepHelper.setGPUSettings(thisTaskGPU, thisTaskGPUParams)
1539
+
1540
+ for task in self.childTaskIterator():
1541
+ task.setTaskGPUSettings(requiresGPU, gpuParams)
1542
+
1543
+ return
1544
+
1545
+ def getRequiresGPU(self):
1546
+ """
1547
+ Return whether this task is supposed to use GPUs or not.
1548
+ If it's a multi-step task, decision follows this order:
1549
+ 1. "required"
1550
+ 2. "optional"
1551
+ 3. "forbidden"
1552
+ :return: a string (default to "forbidden")
1553
+ """
1554
+ requiresGPU = set(["forbidden"])
1555
+ for stepName in self.listAllStepNames():
1556
+ stepHelper = self.getStep(stepName)
1557
+ if stepHelper.stepType() == "CMSSW" and stepHelper.getGPURequired():
1558
+ requiresGPU.add(stepHelper.getGPURequired())
1559
+
1560
+ # now decide what value has higher weight
1561
+ if len(requiresGPU) == 1:
1562
+ return requiresGPU.pop()
1563
+ elif "required" in requiresGPU:
1564
+ return "required"
1565
+ elif "optional" in requiresGPU:
1566
+ return "optional"
1567
+ else:
1568
+ return "forbidden"
1569
+
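+ # Example behaviour (illustration only): a task whose CMSSW steps report
+ # {"forbidden", "optional"} resolves to "optional", while a single step reporting
+ # "required" makes the whole task "required"; with no GPU settings at all it
+ # stays "forbidden".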
1570
+ def getGPURequirements(self):
1571
+ """
1572
+ Return the GPU requirements for this task.
1573
+ If it's a multi-step task, the first step with a meaningful
1574
+ dictionary value will be returned
1575
+ :return: a dictionary with the GPU requirements for this task
1576
+ """
1577
+ gpuRequirements = {}
1578
+ for stepName in sorted(self.listAllStepNames()):
1579
+ stepHelper = self.getStep(stepName)
1580
+ if stepHelper.stepType() == "CMSSW" and stepHelper.getGPURequirements():
1581
+ return stepHelper.getGPURequirements()
1582
+ return gpuRequirements
1583
+
1584
+ def _getStepValue(self, keyDict, defaultValue):
1585
+ """
1586
+ __getStepValue_
1587
+
1588
+ Maps this task name - in a somewhat hacky way - to a 'StepName' value
1589
+ that should exist in a StepChain request. Used only on tasks that have
1590
+ output modules.
1591
+ :param keyDict: a dict with either AcqEra/ProcStr/ProcVer key/value pairs,
1592
+ where the key corresponds to the StepName
1593
+ """
1594
+ if self.taskType() == "Merge":
1595
+ extractedTaskName = self.name().split("Merge")[0]
1596
+ value = keyDict.get(extractedTaskName)
1597
+ elif self.taskType() in ["Production", "Processing"]:
1598
+ value = keyDict.get(self.name())
1599
+ else:
1600
+ value = defaultValue
1601
+
1602
+ return value
1603
+
1604
+ def _setStepProperty(self, propertyName, propertyDict, stepMap):
1605
+ """
1606
+ For StepChain workloads, we also need to set AcqEra/ProcStr/ProcVer
1607
+ at the WMStep level, such that we can properly map different cmsRun
1608
+ steps - within the same task - to different meta data information.
1609
+ :param propertyName: the name of the property to set at step level
1610
+ :param propertyDict: a dictionary mapping StepName to its value
1611
+ :param stepMap: map between step name, step number and cmsRun number,
1612
+ same as returned from the workload getStepMapping
1613
+ """
1614
+ propMethodMap = {"AcquisitionEra": "setAcqEra",
1615
+ "ProcessingString": "setProcStr",
1616
+ "ProcessingVersion": "setProcStr"}
1617
+
1618
+ if self.taskType() not in ["Production", "Processing"]:
1619
+ # then there is no need to set anything, single cmsRun step at most
1620
+ return
1621
+
1622
+ for stepName, stepValues in viewitems(stepMap):
1623
+ cmsRunNum = stepValues[1]
1624
+ stepHelper = self.getStepHelper(cmsRunNum)
1625
+ callableMethod = getattr(stepHelper, propMethodMap[propertyName])
1626
+ callableMethod(propertyDict[stepName])
1627
+
1628
+ def setAcquisitionEra(self, era, parentAcquisitionEra=None, stepChainMap=False):
1629
+ """
1630
+ _setAcquisitionEra_
1631
+
1632
+ Set the task acquisition era
1633
+ """
1634
+
1635
+ if isinstance(era, dict) and stepChainMap:
1636
+ taskEra = self._getStepValue(era, parentAcquisitionEra)
1637
+ self._setStepProperty("AcquisitionEra", era, stepChainMap)
1638
+ elif isinstance(era, dict):
1639
+ taskEra = era.get(self.name(), parentAcquisitionEra)
1640
+ if taskEra is None:
1641
+ # We cannot properly set AcqEra for ACDC of TaskChain Merge
1642
+ # failures, so we should look up for a similar taskname in
1643
+ # the acqera dict passed from the requestor
1644
+ for taskname in era:
1645
+ if taskname in self.name():
1646
+ taskEra = era[taskname]
1647
+ else:
1648
+ taskEra = era
1649
+
1650
+ self.data.parameters.acquisitionEra = taskEra
1651
+
1652
+ for task in self.childTaskIterator():
1653
+ task.setAcquisitionEra(era, taskEra, stepChainMap)
1654
+ return
1655
+
1656
+ def getAcquisitionEra(self):
1657
+ """
1658
+ _getAcquisitionEra_
1659
+
1660
+ Get the task acquisition era.
1661
+ """
1662
+ return getattr(self.data.parameters, 'acquisitionEra', None)
1663
+
1664
+ def setCampaignName(self, campaign):
1665
+ """
1666
+ Set the campaign name of this task
1667
+ :param campaign: str, name of the campaign to be defined
1668
+ """
1669
+ self.data.campaignName = campaign
1670
+
1671
+ def getCampaignName(self):
1672
+ """
1673
+ Get the task campaign name
1674
+ :return: str, campaign name value
1675
+ """
1676
+ return getattr(self.data, 'campaignName', None)
1677
+
1678
+ def setLumiMask(self, lumiMask=None, override=True):
1679
+ """
1680
+ Attach the given LumiMask to the task
1681
+ At this point the lumi mask is just the compactList dict not the LumiList object
1682
+ """
1683
+
1684
+ if not lumiMask:
1685
+ return
1686
+
1687
+ runs = getattr(self.data.input.splitting, 'runs', None)
1688
+ lumis = getattr(self.data.input.splitting, 'lumis', None)
1689
+ if not override and runs and lumis: # Unless instructed, don't overwrite runs and lumis which may be there from a task already
1690
+ return
1691
+
1692
+ runs = []
1693
+ lumis = []
1694
+ for run, runLumis in viewitems(lumiMask):
1695
+ runs.append(int(run))
1696
+ lumiList = []
1697
+ for lumi in runLumis:
1698
+ lumiList.extend([str(l) for l in lumi])
1699
+ lumis.append(','.join(lumiList))
1700
+
1701
+ self.data.input.splitting.runs = runs
1702
+ self.data.input.splitting.lumis = lumis
1703
+
1704
+ for task in self.childTaskIterator():
1705
+ task.setLumiMask(lumiMask, override)
1706
+
1707
+ return
1708
+
1709
+ def getLumiMask(self):
1710
+ """
1711
+ return the lumi mask
1712
+ """
1713
+ runs = getattr(self.data.input.splitting, 'runs', None)
1714
+ lumis = getattr(self.data.input.splitting, 'lumis', None)
1715
+ if runs and lumis:
1716
+ return LumiList(wmagentFormat=(runs, lumis))
1717
+
1718
+ return {}
1719
+
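+ # Worked example (illustration only): a compactList lumi mask such as
+ #     {"1": [[1, 33], [35, 35]], "2": [[1, 45]]}
+ # is flattened by setLumiMask() into
+ #     self.data.input.splitting.runs  = [1, 2]
+ #     self.data.input.splitting.lumis = ["1,33,35,35", "1,45"]
+ # and getLumiMask() rebuilds a LumiList from that (runs, lumis) pair via wmagentFormat.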
1720
+ def _propMethodMap(self):
1721
+ """
1722
+ Internal mapping method which maps each property to the method that
1722
+ needs to be called to set it.
1723
+ For now it only contains properties which are updated at the assignment stage.
1725
+ """
1726
+ propMap = {"ProcessingVersion": self.setProcessingVersion,
1727
+ "AcquisitionEra": self.setAcquisitionEra,
1728
+ "ProcessingString": self.setProcessingString
1729
+ }
1730
+ return propMap
1731
+
1732
+ def setProperties(self, properties):
1733
+ """
1734
+ Set task properties (currently only used at the assignment stage, but kept general)
1735
+ """
1736
+ for prop, value in viewitems(properties):
1737
+ self._propMethodMap()[prop](value)
1738
+
1739
+ def deleteChild(self, childName):
1740
+ """
1741
+ _deleteChild_
1742
+
1743
+ Remove the child task from the tree, if it exists
1744
+ """
1745
+ self.deleteNode(childName)
1746
+
1747
+ def setPrepID(self, prepID):
1748
+ """
1749
+ _setPrepID_
1750
+
1751
+ Set the prepID for this task and for all the tasks below it
1752
+ """
1753
+ # if prepID doesn't exist yet, set it; if it already exists, keep the existing value.
1754
+ if not self.getPrepID() and prepID:
1755
+ self.data.prepID = prepID
1756
+
1757
+ prepID = self.getPrepID()
1758
+ # set child prepid
1759
+ if prepID:
1760
+ for task in self.childTaskIterator():
1761
+ task.setPrepID(prepID)
1762
+
1763
+ def getPrepID(self):
1764
+ """
1765
+ _getPrepID_
1766
+
1767
+ Get the prepID for the workflow
1768
+ """
1769
+ return getattr(self.data, 'prepID', None)
1770
+
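+ # Hedged usage sketch (hypothetical values; makeWMTask is defined at the end of this
+ # module): prepID assignment is first-write-wins and is pushed down to child tasks:
+ #
+ #     task = makeWMTask("ProductionTask")
+ #     task.setPrepID("TEST-PrepID-00001")
+ #     task.setPrepID("SomethingElse")      # ignored, a prepID is already set
+ #     assert task.getPrepID() == "TEST-PrepID-00001"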
1771
+ def setLFNBase(self, mergedLFNBase, unmergedLFNBase):
1772
+ """
1773
+ _setLFNBase_
1774
+
1775
+ Set the merged and unmerged base LFNs for all tasks.
1776
+ """
1777
+ self.data.mergedLFNBase = mergedLFNBase
1778
+ self.data.unmergedLFNBase = unmergedLFNBase
1779
+ for task in self.childTaskIterator():
1780
+ task.setLFNBase(mergedLFNBase, unmergedLFNBase)
1781
+
1782
+ return
1783
+
1784
+ def _getLFNBase(self):
1785
+ """
1786
+ Private method returning the merged and unmerged LFN bases.
1787
+ The LFN bases should be set by the workflow.
1788
+ """
1789
+ return (getattr(self.data, 'mergedLFNBase', "/store/data"),
1790
+ getattr(self.data, 'unmergedLFNBase', "/store/unmerged"))
1791
+
1792
+ def _getKeyValue(self, keyname, stepname, values):
+ """
+ Return the value of keyname from values: a plain string is returned as-is,
+ while a dict keyed by step name returns the entry for stepname (None if absent).
+ """
1793
+ if keyname not in values:
1794
+ return
1795
+ elif isinstance(values[keyname], (newstr, bytes)):
1796
+ return values[keyname]
1797
+ elif isinstance(values[keyname], dict):
1798
+ return values[keyname].get(stepname)
1799
+
1800
+ def _updateLFNsStepChain(self, stepName, dictValues, stepMapping):
1801
+ """
1802
+ __updateLFNsStepChain_
1803
+
1804
+ Helper function needed for a proper StepChain LFN/ProcessedDataset handling
1805
+
1806
+ :param stepName: is the cmsRun name (cmsRun1, cmsRun2, ...)
1807
+ :param dictValues: part of the arguments provided during assignment
1808
+ :param stepMapping: built during StepChain creation
1809
+ :return: a single string for each of those 3 properties
1810
+ """
1811
+ reqStepName = None
1812
+ for reqStep, values in viewitems(stepMapping):
1813
+ if stepName == values[1]:
1814
+ reqStepName = reqStep
1815
+ if not reqStepName:
1816
+ # I have no idea which cmsRun is that...
1817
+ return None, None, None
1818
+
1819
+ era = self._getKeyValue('AcquisitionEra', reqStepName, dictValues)
1820
+ if not era:
1821
+ era = self.getAcquisitionEra()
1822
+ procstr = self._getKeyValue('ProcessingString', reqStepName, dictValues)
1823
+ if not procstr:
1824
+ procstr = self.getProcessingString()
1825
+ procver = self._getKeyValue('ProcessingVersion', reqStepName, dictValues)
1826
+ if not procver:
1827
+ procver = self.getProcessingVersion()
1828
+
1829
+ return era, procstr, procver
1830
+
1831
+ def updateLFNsAndDatasets(self, runNumber=None, dictValues=None, stepMapping=None):
1832
+ """
1833
+ _updateLFNsAndDatasets_
1834
+
1835
+ Update all the output LFNs and data names for all tasks in the workflow.
1836
+ This needs to be called after updating the acquisition era, processing
1837
+ version or merged/unmerged lfn base.
1838
+ """
1839
+ mergedLFNBase, unmergedLFNBase = self._getLFNBase()
1840
+ taskType = self.taskType()
1841
+
1842
+ for stepName in self.listAllStepNames():
1843
+ stepHelper = self.getStepHelper(stepName)
1844
+
1845
+ if stepHelper.stepType() == "CMSSW":
1846
+ if dictValues and stepMapping:
1847
+ # if it's a StepChain, resolve the per-step AcqEra/ProcStr/ProcVer from the assignment arguments
1848
+ acqera, procstr, procver = self._updateLFNsStepChain(stepName, dictValues, stepMapping)
1849
+ else:
1850
+ acqera = self.getAcquisitionEra()
1851
+ procstr = self.getProcessingString()
1852
+ procver = self.getProcessingVersion()
1853
+
1854
+ for outputModuleName in stepHelper.listOutputModules():
1855
+ outputModule = stepHelper.getOutputModule(outputModuleName)
1856
+ filterName = getattr(outputModule, "filterName", None)
1857
+
1858
+ if procstr:
1859
+ processingEra = "%s-v%i" % (procstr, procver)
1860
+ else:
1861
+ processingEra = "v%i" % procver
1862
+ if filterName:
1863
+ processedDataset = "%s-%s-%s" % (acqera, filterName, processingEra)
1864
+ processingString = "%s-%s" % (filterName, processingEra)
1865
+ else:
1866
+ processedDataset = "%s-%s" % (acqera, processingEra)
1867
+ processingString = processingEra
1868
+
1869
+ unmergedLFN = "%s/%s/%s/%s/%s" % (unmergedLFNBase,
1870
+ acqera,
1871
+ getattr(outputModule, "primaryDataset"),
1872
+ getattr(outputModule, "dataTier"),
1873
+ processingString)
1874
+ mergedLFN = "%s/%s/%s/%s/%s" % (mergedLFNBase,
1875
+ acqera,
1876
+ getattr(outputModule, "primaryDataset"),
1877
+ getattr(outputModule, "dataTier"),
1878
+ processingString)
1879
+
1880
+ if runNumber is not None and runNumber > 0:
1881
+ runString = str(runNumber).zfill(9)
1882
+ lfnSuffix = "/%s/%s/%s" % (runString[0:3],
1883
+ runString[3:6],
1884
+ runString[6:9])
1885
+ unmergedLFN += lfnSuffix
1886
+ mergedLFN += lfnSuffix
1887
+
1888
+ lfnBase(unmergedLFN)
1889
+ lfnBase(mergedLFN)
1890
+ setattr(outputModule, "processedDataset", processedDataset)
1891
+
1892
+ # For merge tasks, we want all output to go to the merged LFN base.
1893
+ if taskType == "Merge":
1894
+ setattr(outputModule, "lfnBase", mergedLFN)
1895
+ setattr(outputModule, "mergedLFNBase", mergedLFN)
1896
+
1897
+ if getattr(outputModule, "dataTier") in ["DQM", "DQMIO"]:
1898
+ datasetName = "/%s/%s/%s" % (getattr(outputModule, "primaryDataset"),
1899
+ processedDataset,
1900
+ getattr(outputModule, "dataTier"))
1901
+ self.updateDatasetName(datasetName)
1902
+ else:
1903
+ setattr(outputModule, "lfnBase", unmergedLFN)
1904
+ setattr(outputModule, "mergedLFNBase", mergedLFN)
1905
+
1906
+ self.setTaskLogBaseLFN(unmergedLFNBase)
1907
+
1908
+ # do the same thing for all the child tasks
1909
+ for task in self.childTaskIterator():
1910
+ task.updateLFNsAndDatasets(runNumber=runNumber)
1911
+
1912
+ return
1913
+
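+ # Worked example (illustration only, all values hypothetical): with
+ #     unmergedLFNBase = "/store/unmerged", acqera = "Run2024A",
+ #     primaryDataset = "SingleMuon", dataTier = "AOD",
+ #     procstr = "PromptReco", procver = 1, no filterName, runNumber = 123456
+ # the loop above produces
+ #     processedDataset = "Run2024A-PromptReco-v1"
+ #     unmergedLFN      = "/store/unmerged/Run2024A/SingleMuon/AOD/PromptReco-v1/000/123/456"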
1914
+ def updateDatasetName(self, datasetName):
1915
+ """
1916
+ _updateDatasetName_
1917
+
1918
+ Updates the dataset name argument of the mergeTask's harvesting
1919
+ children tasks
1920
+ """
1921
+ for task in self.childTaskIterator():
1922
+ if task.taskType() == "Harvesting":
1923
+ for stepName in task.listAllStepNames():
1924
+ stepHelper = task.getStepHelper(stepName)
1925
+
1926
+ if stepHelper.stepType() == "CMSSW":
1927
+ cmsswHelper = stepHelper.getTypeHelper()
1928
+ cmsswHelper.setDatasetName(datasetName)
1929
+
1930
+ return
1931
+
1932
+
1933
+ class WMTask(ConfigSectionTree):
1934
+ """
1935
+ _WMTask_
1936
+
1937
+ workload management task.
1938
+ Allow a set of processing job specifications that are interdependent
1939
+ to be modelled as a tree structure.
1940
+
1941
+ """
1942
+
1943
+ def __init__(self, name):
1944
+ ConfigSectionTree.__init__(self, name)
1945
+ self.objectType = self.__class__.__name__
1946
+ self.pathName = None
1947
+ self.taskType = None
1948
+ self.prepID = None
1949
+ self.section_("steps")
1950
+ self.steps.topStepName = None
1951
+ self.section_("parameters")
1952
+ self.section_("pythonLibs")
1953
+ self.section_("constraints")
1954
+ self.section_("input")
1955
+ self.section_("notifications")
1956
+ self.section_("subscriptions")
1957
+ self.section_("environment")
1958
+ self.notifications.targets = []
1959
+ self.input.sandbox = None
1960
+ self.input.section_("splitting")
1961
+ self.input.splitting.algorithm = None
1962
+ self.input.splitting.section_("performance")
1963
+ self.constraints.section_("sites")
1964
+ self.constraints.sites.whitelist = []
1965
+ self.constraints.sites.blacklist = []
1966
+ self.constraints.sites.trustlists = False
1967
+ self.constraints.sites.trustPUlists = False
1968
+ self.subscriptions.outputSubs = []
1969
+ self.input.section_("WMBS")
1970
+
1971
+
1972
+ def makeWMTask(taskName):
1973
+ """
1974
+ _makeWMTask_
1975
+
1976
+ Convenience method to instantiate a new WMTask with the name
1977
+ provided and wrap it in a helper
1978
+
1979
+ """
1980
+ return WMTaskHelper(WMTask(taskName))
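+ # Minimal usage sketch (illustration only; argument values are hypothetical):
+ #
+ #     task = makeWMTask("ProcessingTask")
+ #     task.setTaskType("Processing")
+ #     task.setAcquisitionEra("Run2024A")
+ #     task.setCampaignName("TestCampaign")
+ #     print(task.taskType(), task.getAcquisitionEra(), task.getCampaignName())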