wmglobalqueue-2.3.10-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of wmglobalqueue might be problematic.

Files changed (345)
  1. Utils/CPMetrics.py +270 -0
  2. Utils/CertTools.py +62 -0
  3. Utils/EmailAlert.py +50 -0
  4. Utils/ExtendedUnitTestCase.py +62 -0
  5. Utils/FileTools.py +182 -0
  6. Utils/IteratorTools.py +80 -0
  7. Utils/MathUtils.py +31 -0
  8. Utils/MemoryCache.py +119 -0
  9. Utils/Patterns.py +24 -0
  10. Utils/Pipeline.py +137 -0
  11. Utils/PortForward.py +97 -0
  12. Utils/ProcessStats.py +103 -0
  13. Utils/PythonVersion.py +17 -0
  14. Utils/Signals.py +36 -0
  15. Utils/TemporaryEnvironment.py +27 -0
  16. Utils/Throttled.py +227 -0
  17. Utils/Timers.py +130 -0
  18. Utils/Timestamps.py +86 -0
  19. Utils/TokenManager.py +143 -0
  20. Utils/Tracing.py +60 -0
  21. Utils/TwPrint.py +98 -0
  22. Utils/Utilities.py +308 -0
  23. Utils/__init__.py +11 -0
  24. WMCore/ACDC/Collection.py +57 -0
  25. WMCore/ACDC/CollectionTypes.py +12 -0
  26. WMCore/ACDC/CouchCollection.py +67 -0
  27. WMCore/ACDC/CouchFileset.py +238 -0
  28. WMCore/ACDC/CouchService.py +73 -0
  29. WMCore/ACDC/DataCollectionService.py +485 -0
  30. WMCore/ACDC/Fileset.py +94 -0
  31. WMCore/ACDC/__init__.py +11 -0
  32. WMCore/Algorithms/Alarm.py +39 -0
  33. WMCore/Algorithms/MathAlgos.py +274 -0
  34. WMCore/Algorithms/MiscAlgos.py +67 -0
  35. WMCore/Algorithms/ParseXMLFile.py +115 -0
  36. WMCore/Algorithms/Permissions.py +27 -0
  37. WMCore/Algorithms/Singleton.py +58 -0
  38. WMCore/Algorithms/SubprocessAlgos.py +129 -0
  39. WMCore/Algorithms/__init__.py +7 -0
  40. WMCore/Cache/GenericDataCache.py +98 -0
  41. WMCore/Cache/WMConfigCache.py +572 -0
  42. WMCore/Cache/__init__.py +0 -0
  43. WMCore/Configuration.py +651 -0
  44. WMCore/DAOFactory.py +47 -0
  45. WMCore/DataStructs/File.py +177 -0
  46. WMCore/DataStructs/Fileset.py +140 -0
  47. WMCore/DataStructs/Job.py +182 -0
  48. WMCore/DataStructs/JobGroup.py +142 -0
  49. WMCore/DataStructs/JobPackage.py +49 -0
  50. WMCore/DataStructs/LumiList.py +734 -0
  51. WMCore/DataStructs/Mask.py +219 -0
  52. WMCore/DataStructs/MathStructs/ContinuousSummaryHistogram.py +197 -0
  53. WMCore/DataStructs/MathStructs/DiscreteSummaryHistogram.py +92 -0
  54. WMCore/DataStructs/MathStructs/SummaryHistogram.py +117 -0
  55. WMCore/DataStructs/MathStructs/__init__.py +0 -0
  56. WMCore/DataStructs/Pickleable.py +24 -0
  57. WMCore/DataStructs/Run.py +256 -0
  58. WMCore/DataStructs/Subscription.py +175 -0
  59. WMCore/DataStructs/WMObject.py +47 -0
  60. WMCore/DataStructs/WorkUnit.py +112 -0
  61. WMCore/DataStructs/Workflow.py +60 -0
  62. WMCore/DataStructs/__init__.py +8 -0
  63. WMCore/Database/CMSCouch.py +1349 -0
  64. WMCore/Database/ConfigDBMap.py +29 -0
  65. WMCore/Database/CouchUtils.py +118 -0
  66. WMCore/Database/DBCore.py +198 -0
  67. WMCore/Database/DBCreator.py +113 -0
  68. WMCore/Database/DBExceptionHandler.py +57 -0
  69. WMCore/Database/DBFactory.py +110 -0
  70. WMCore/Database/DBFormatter.py +177 -0
  71. WMCore/Database/Dialects.py +13 -0
  72. WMCore/Database/ExecuteDAO.py +327 -0
  73. WMCore/Database/MongoDB.py +241 -0
  74. WMCore/Database/MySQL/Destroy.py +42 -0
  75. WMCore/Database/MySQL/ListUserContent.py +20 -0
  76. WMCore/Database/MySQL/__init__.py +9 -0
  77. WMCore/Database/MySQLCore.py +132 -0
  78. WMCore/Database/Oracle/Destroy.py +56 -0
  79. WMCore/Database/Oracle/ListUserContent.py +19 -0
  80. WMCore/Database/Oracle/__init__.py +9 -0
  81. WMCore/Database/ResultSet.py +44 -0
  82. WMCore/Database/Transaction.py +91 -0
  83. WMCore/Database/__init__.py +9 -0
  84. WMCore/Database/ipy_profile_couch.py +438 -0
  85. WMCore/GlobalWorkQueue/CherryPyThreads/CleanUpTask.py +29 -0
  86. WMCore/GlobalWorkQueue/CherryPyThreads/HeartbeatMonitor.py +105 -0
  87. WMCore/GlobalWorkQueue/CherryPyThreads/LocationUpdateTask.py +28 -0
  88. WMCore/GlobalWorkQueue/CherryPyThreads/ReqMgrInteractionTask.py +35 -0
  89. WMCore/GlobalWorkQueue/CherryPyThreads/__init__.py +0 -0
  90. WMCore/GlobalWorkQueue/__init__.py +0 -0
  91. WMCore/GroupUser/CouchObject.py +127 -0
  92. WMCore/GroupUser/Decorators.py +51 -0
  93. WMCore/GroupUser/Group.py +33 -0
  94. WMCore/GroupUser/Interface.py +73 -0
  95. WMCore/GroupUser/User.py +96 -0
  96. WMCore/GroupUser/__init__.py +11 -0
  97. WMCore/Lexicon.py +836 -0
  98. WMCore/REST/Auth.py +202 -0
  99. WMCore/REST/CherryPyPeriodicTask.py +166 -0
  100. WMCore/REST/Error.py +333 -0
  101. WMCore/REST/Format.py +642 -0
  102. WMCore/REST/HeartbeatMonitorBase.py +90 -0
  103. WMCore/REST/Main.py +623 -0
  104. WMCore/REST/Server.py +2435 -0
  105. WMCore/REST/Services.py +24 -0
  106. WMCore/REST/Test.py +120 -0
  107. WMCore/REST/Tools.py +38 -0
  108. WMCore/REST/Validation.py +250 -0
  109. WMCore/REST/__init__.py +1 -0
  110. WMCore/ReqMgr/DataStructs/RequestStatus.py +209 -0
  111. WMCore/ReqMgr/DataStructs/RequestType.py +13 -0
  112. WMCore/ReqMgr/DataStructs/__init__.py +0 -0
  113. WMCore/ReqMgr/__init__.py +1 -0
  114. WMCore/Services/AlertManager/AlertManagerAPI.py +111 -0
  115. WMCore/Services/AlertManager/__init__.py +0 -0
  116. WMCore/Services/CRIC/CRIC.py +238 -0
  117. WMCore/Services/CRIC/__init__.py +0 -0
  118. WMCore/Services/DBS/DBS3Reader.py +1044 -0
  119. WMCore/Services/DBS/DBSConcurrency.py +44 -0
  120. WMCore/Services/DBS/DBSErrors.py +113 -0
  121. WMCore/Services/DBS/DBSReader.py +23 -0
  122. WMCore/Services/DBS/DBSUtils.py +139 -0
  123. WMCore/Services/DBS/DBSWriterObjects.py +381 -0
  124. WMCore/Services/DBS/ProdException.py +133 -0
  125. WMCore/Services/DBS/__init__.py +8 -0
  126. WMCore/Services/FWJRDB/FWJRDBAPI.py +118 -0
  127. WMCore/Services/FWJRDB/__init__.py +0 -0
  128. WMCore/Services/HTTPS/HTTPSAuthHandler.py +66 -0
  129. WMCore/Services/HTTPS/__init__.py +0 -0
  130. WMCore/Services/LogDB/LogDB.py +201 -0
  131. WMCore/Services/LogDB/LogDBBackend.py +191 -0
  132. WMCore/Services/LogDB/LogDBExceptions.py +11 -0
  133. WMCore/Services/LogDB/LogDBReport.py +85 -0
  134. WMCore/Services/LogDB/__init__.py +0 -0
  135. WMCore/Services/MSPileup/__init__.py +0 -0
  136. WMCore/Services/MSUtils/MSUtils.py +54 -0
  137. WMCore/Services/MSUtils/__init__.py +0 -0
  138. WMCore/Services/McM/McM.py +173 -0
  139. WMCore/Services/McM/__init__.py +8 -0
  140. WMCore/Services/MonIT/Grafana.py +133 -0
  141. WMCore/Services/MonIT/__init__.py +0 -0
  142. WMCore/Services/PyCondor/PyCondorAPI.py +154 -0
  143. WMCore/Services/PyCondor/PyCondorUtils.py +105 -0
  144. WMCore/Services/PyCondor/__init__.py +0 -0
  145. WMCore/Services/ReqMgr/ReqMgr.py +261 -0
  146. WMCore/Services/ReqMgr/__init__.py +0 -0
  147. WMCore/Services/ReqMgrAux/ReqMgrAux.py +419 -0
  148. WMCore/Services/ReqMgrAux/__init__.py +0 -0
  149. WMCore/Services/RequestDB/RequestDBReader.py +267 -0
  150. WMCore/Services/RequestDB/RequestDBWriter.py +39 -0
  151. WMCore/Services/RequestDB/__init__.py +0 -0
  152. WMCore/Services/Requests.py +624 -0
  153. WMCore/Services/Rucio/Rucio.py +1287 -0
  154. WMCore/Services/Rucio/RucioUtils.py +74 -0
  155. WMCore/Services/Rucio/__init__.py +0 -0
  156. WMCore/Services/RucioConMon/RucioConMon.py +128 -0
  157. WMCore/Services/RucioConMon/__init__.py +0 -0
  158. WMCore/Services/Service.py +400 -0
  159. WMCore/Services/StompAMQ/__init__.py +0 -0
  160. WMCore/Services/TagCollector/TagCollector.py +155 -0
  161. WMCore/Services/TagCollector/XMLUtils.py +98 -0
  162. WMCore/Services/TagCollector/__init__.py +0 -0
  163. WMCore/Services/UUIDLib.py +13 -0
  164. WMCore/Services/UserFileCache/UserFileCache.py +160 -0
  165. WMCore/Services/UserFileCache/__init__.py +8 -0
  166. WMCore/Services/WMAgent/WMAgent.py +63 -0
  167. WMCore/Services/WMAgent/__init__.py +0 -0
  168. WMCore/Services/WMArchive/CMSSWMetrics.py +526 -0
  169. WMCore/Services/WMArchive/DataMap.py +463 -0
  170. WMCore/Services/WMArchive/WMArchive.py +33 -0
  171. WMCore/Services/WMArchive/__init__.py +0 -0
  172. WMCore/Services/WMBS/WMBS.py +97 -0
  173. WMCore/Services/WMBS/__init__.py +0 -0
  174. WMCore/Services/WMStats/DataStruct/RequestInfoCollection.py +300 -0
  175. WMCore/Services/WMStats/DataStruct/__init__.py +0 -0
  176. WMCore/Services/WMStats/WMStatsPycurl.py +145 -0
  177. WMCore/Services/WMStats/WMStatsReader.py +445 -0
  178. WMCore/Services/WMStats/WMStatsWriter.py +273 -0
  179. WMCore/Services/WMStats/__init__.py +0 -0
  180. WMCore/Services/WMStatsServer/WMStatsServer.py +134 -0
  181. WMCore/Services/WMStatsServer/__init__.py +0 -0
  182. WMCore/Services/WorkQueue/WorkQueue.py +492 -0
  183. WMCore/Services/WorkQueue/__init__.py +0 -0
  184. WMCore/Services/__init__.py +8 -0
  185. WMCore/Services/pycurl_manager.py +574 -0
  186. WMCore/WMBase.py +50 -0
  187. WMCore/WMConnectionBase.py +164 -0
  188. WMCore/WMException.py +183 -0
  189. WMCore/WMExceptions.py +269 -0
  190. WMCore/WMFactory.py +76 -0
  191. WMCore/WMInit.py +228 -0
  192. WMCore/WMLogging.py +108 -0
  193. WMCore/WMSpec/ConfigSectionTree.py +442 -0
  194. WMCore/WMSpec/Persistency.py +135 -0
  195. WMCore/WMSpec/Steps/BuildMaster.py +87 -0
  196. WMCore/WMSpec/Steps/BuildTools.py +201 -0
  197. WMCore/WMSpec/Steps/Builder.py +97 -0
  198. WMCore/WMSpec/Steps/Diagnostic.py +89 -0
  199. WMCore/WMSpec/Steps/Emulator.py +62 -0
  200. WMCore/WMSpec/Steps/ExecuteMaster.py +208 -0
  201. WMCore/WMSpec/Steps/Executor.py +210 -0
  202. WMCore/WMSpec/Steps/StepFactory.py +213 -0
  203. WMCore/WMSpec/Steps/TaskEmulator.py +75 -0
  204. WMCore/WMSpec/Steps/Template.py +204 -0
  205. WMCore/WMSpec/Steps/Templates/AlcaHarvest.py +76 -0
  206. WMCore/WMSpec/Steps/Templates/CMSSW.py +613 -0
  207. WMCore/WMSpec/Steps/Templates/DQMUpload.py +59 -0
  208. WMCore/WMSpec/Steps/Templates/DeleteFiles.py +70 -0
  209. WMCore/WMSpec/Steps/Templates/LogArchive.py +84 -0
  210. WMCore/WMSpec/Steps/Templates/LogCollect.py +105 -0
  211. WMCore/WMSpec/Steps/Templates/StageOut.py +105 -0
  212. WMCore/WMSpec/Steps/Templates/__init__.py +10 -0
  213. WMCore/WMSpec/Steps/WMExecutionFailure.py +21 -0
  214. WMCore/WMSpec/Steps/__init__.py +8 -0
  215. WMCore/WMSpec/Utilities.py +63 -0
  216. WMCore/WMSpec/WMSpecErrors.py +12 -0
  217. WMCore/WMSpec/WMStep.py +347 -0
  218. WMCore/WMSpec/WMTask.py +1980 -0
  219. WMCore/WMSpec/WMWorkload.py +2288 -0
  220. WMCore/WMSpec/WMWorkloadTools.py +370 -0
  221. WMCore/WMSpec/__init__.py +9 -0
  222. WMCore/WorkQueue/DataLocationMapper.py +273 -0
  223. WMCore/WorkQueue/DataStructs/ACDCBlock.py +47 -0
  224. WMCore/WorkQueue/DataStructs/Block.py +48 -0
  225. WMCore/WorkQueue/DataStructs/CouchWorkQueueElement.py +148 -0
  226. WMCore/WorkQueue/DataStructs/WorkQueueElement.py +274 -0
  227. WMCore/WorkQueue/DataStructs/WorkQueueElementResult.py +152 -0
  228. WMCore/WorkQueue/DataStructs/WorkQueueElementsSummary.py +185 -0
  229. WMCore/WorkQueue/DataStructs/__init__.py +0 -0
  230. WMCore/WorkQueue/Policy/End/EndPolicyInterface.py +44 -0
  231. WMCore/WorkQueue/Policy/End/SingleShot.py +22 -0
  232. WMCore/WorkQueue/Policy/End/__init__.py +32 -0
  233. WMCore/WorkQueue/Policy/PolicyInterface.py +17 -0
  234. WMCore/WorkQueue/Policy/Start/Block.py +258 -0
  235. WMCore/WorkQueue/Policy/Start/Dataset.py +180 -0
  236. WMCore/WorkQueue/Policy/Start/MonteCarlo.py +131 -0
  237. WMCore/WorkQueue/Policy/Start/ResubmitBlock.py +171 -0
  238. WMCore/WorkQueue/Policy/Start/StartPolicyInterface.py +316 -0
  239. WMCore/WorkQueue/Policy/Start/__init__.py +34 -0
  240. WMCore/WorkQueue/Policy/__init__.py +57 -0
  241. WMCore/WorkQueue/WMBSHelper.py +772 -0
  242. WMCore/WorkQueue/WorkQueue.py +1237 -0
  243. WMCore/WorkQueue/WorkQueueBackend.py +750 -0
  244. WMCore/WorkQueue/WorkQueueBase.py +39 -0
  245. WMCore/WorkQueue/WorkQueueExceptions.py +44 -0
  246. WMCore/WorkQueue/WorkQueueReqMgrInterface.py +278 -0
  247. WMCore/WorkQueue/WorkQueueUtils.py +130 -0
  248. WMCore/WorkQueue/__init__.py +13 -0
  249. WMCore/Wrappers/JsonWrapper/JSONThunker.py +342 -0
  250. WMCore/Wrappers/JsonWrapper/__init__.py +7 -0
  251. WMCore/Wrappers/__init__.py +6 -0
  252. WMCore/__init__.py +10 -0
  253. wmglobalqueue-2.3.10.data/data/bin/wmc-dist-patch +15 -0
  254. wmglobalqueue-2.3.10.data/data/bin/wmc-dist-unpatch +8 -0
  255. wmglobalqueue-2.3.10.data/data/bin/wmc-httpd +3 -0
  256. wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/.couchapprc +1 -0
  257. wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/README.md +40 -0
  258. wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/_attachments/index.html +264 -0
  259. wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/_attachments/js/ElementInfoByWorkflow.js +96 -0
  260. wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/_attachments/js/StuckElementInfo.js +57 -0
  261. wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/_attachments/js/WorkloadInfoTable.js +80 -0
  262. wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/_attachments/js/dataTable.js +70 -0
  263. wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/_attachments/js/namespace.js +23 -0
  264. wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/_attachments/style/main.css +75 -0
  265. wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/couchapp.json +4 -0
  266. wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/filters/childQueueFilter.js +13 -0
  267. wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/filters/filterDeletedDocs.js +3 -0
  268. wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/filters/queueFilter.js +11 -0
  269. wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/language +1 -0
  270. wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/lib/mustache.js +333 -0
  271. wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/lib/validate.js +27 -0
  272. wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/lib/workqueue_utils.js +61 -0
  273. wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/lists/elementsDetail.js +28 -0
  274. wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/lists/filter.js +86 -0
  275. wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/lists/stuckElements.js +38 -0
  276. wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/lists/workRestrictions.js +153 -0
  277. wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/lists/workflowSummary.js +28 -0
  278. wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/rewrites.json +73 -0
  279. wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/shows/redirect.js +23 -0
  280. wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/shows/status.js +40 -0
  281. wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/templates/ElementSummaryByWorkflow.html +27 -0
  282. wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/templates/StuckElementSummary.html +26 -0
  283. wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/templates/TaskStatus.html +23 -0
  284. wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/templates/WorkflowSummary.html +27 -0
  285. wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/templates/partials/workqueue-common-lib.html +2 -0
  286. wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/templates/partials/yui-lib-remote.html +16 -0
  287. wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/templates/partials/yui-lib.html +18 -0
  288. wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/updates/in-place.js +50 -0
  289. wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/validate_doc_update.js +8 -0
  290. wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/vendor/couchapp/_attachments/jquery.couch.app.js +235 -0
  291. wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/vendor/couchapp/_attachments/jquery.pathbinder.js +173 -0
  292. wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/activeData/map.js +8 -0
  293. wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/activeData/reduce.js +2 -0
  294. wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/activeParentData/map.js +8 -0
  295. wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/activeParentData/reduce.js +2 -0
  296. wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/activePileupData/map.js +8 -0
  297. wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/activePileupData/reduce.js +2 -0
  298. wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/analyticsData/map.js +11 -0
  299. wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/analyticsData/reduce.js +1 -0
  300. wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/availableByPriority/map.js +6 -0
  301. wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/conflicts/map.js +5 -0
  302. wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/elements/map.js +5 -0
  303. wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/elementsByData/map.js +8 -0
  304. wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/elementsByParent/map.js +8 -0
  305. wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/elementsByParentData/map.js +8 -0
  306. wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/elementsByPileupData/map.js +8 -0
  307. wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/elementsByStatus/map.js +8 -0
  308. wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/elementsBySubscription/map.js +6 -0
  309. wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/elementsByWorkflow/map.js +8 -0
  310. wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/elementsByWorkflow/reduce.js +3 -0
  311. wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/elementsDetailByWorkflowAndStatus/map.js +26 -0
  312. wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/jobInjectStatusByRequest/map.js +10 -0
  313. wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/jobInjectStatusByRequest/reduce.js +1 -0
  314. wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/jobStatusByRequest/map.js +6 -0
  315. wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/jobStatusByRequest/reduce.js +1 -0
  316. wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/jobsByChildQueueAndPriority/map.js +6 -0
  317. wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/jobsByChildQueueAndPriority/reduce.js +1 -0
  318. wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/jobsByChildQueueAndStatus/map.js +6 -0
  319. wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/jobsByChildQueueAndStatus/reduce.js +1 -0
  320. wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/jobsByRequest/map.js +6 -0
  321. wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/jobsByRequest/reduce.js +1 -0
  322. wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/jobsByStatus/map.js +6 -0
  323. wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/jobsByStatus/reduce.js +1 -0
  324. wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/jobsByStatusAndPriority/map.js +6 -0
  325. wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/jobsByStatusAndPriority/reduce.js +1 -0
  326. wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/openRequests/map.js +6 -0
  327. wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/recent-items/map.js +5 -0
  328. wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/siteWhitelistByRequest/map.js +6 -0
  329. wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/siteWhitelistByRequest/reduce.js +1 -0
  330. wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/specsByWorkflow/map.js +5 -0
  331. wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/stuckElements/map.js +38 -0
  332. wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/wmbsInjectStatusByRequest/map.js +12 -0
  333. wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/wmbsInjectStatusByRequest/reduce.js +3 -0
  334. wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/wmbsUrl/map.js +6 -0
  335. wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/wmbsUrl/reduce.js +2 -0
  336. wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/wmbsUrlByRequest/map.js +6 -0
  337. wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/wmbsUrlByRequest/reduce.js +2 -0
  338. wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/workflowSummary/map.js +9 -0
  339. wmglobalqueue-2.3.10.data/data/data/couchapps/WorkQueue/views/workflowSummary/reduce.js +10 -0
  340. wmglobalqueue-2.3.10.dist-info/LICENSE +202 -0
  341. wmglobalqueue-2.3.10.dist-info/METADATA +24 -0
  342. wmglobalqueue-2.3.10.dist-info/NOTICE +16 -0
  343. wmglobalqueue-2.3.10.dist-info/RECORD +345 -0
  344. wmglobalqueue-2.3.10.dist-info/WHEEL +5 -0
  345. wmglobalqueue-2.3.10.dist-info/top_level.txt +2 -0
WMCore/Database/CMSCouch.py
@@ -0,0 +1,1349 @@
1
+ #!/usr/bin/env python
2
+ """
3
+ _CMSCouch_
4
+
5
+ A simple API to CouchDB that sends HTTP requests to the REST interface.
6
+
7
+ http://wiki.apache.org/couchdb/API_Cheatsheet
8
+
9
+ NOT A THREAD SAFE CLASS.
10
+ """
11
+ from __future__ import print_function, division
12
+ from builtins import str as newstr, bytes as newbytes, object
13
+ from Utils.Utilities import decodeBytesToUnicode, encodeUnicodeToBytes, decodeBytesToUnicodeConditional
14
+ from Utils.PythonVersion import PY3
15
+
16
+ from future import standard_library
17
+ standard_library.install_aliases()
18
+ from future.utils import viewitems
19
+ import urllib.request, urllib.parse, urllib.error
20
+
21
+ import base64
22
+ import hashlib
23
+ import json
24
+ import logging
25
+ import re
26
+ import time
27
+ import sys
28
+ from datetime import datetime
29
+ from http.client import HTTPException
30
+
31
+ from Utils.IteratorTools import grouper, nestedDictUpdate
32
+ from WMCore.Lexicon import sanitizeURL
33
+ from WMCore.Services.Requests import JSONRequests
34
+
35
+
36
+ def check_name(dbname):
37
+ match = re.match("^[a-z0-9_$()+-/]+$", urllib.parse.unquote_plus(dbname))
38
+ if not match:
39
+ msg = '%s is not a valid database name'
40
+ raise ValueError(msg % urllib.parse.unquote_plus(dbname))
41
+
42
+
43
+ def check_server_url(srvurl):
44
+ good_name = srvurl.startswith('http://') or srvurl.startswith('https://')
45
+ if not good_name:
46
+ raise ValueError('You must include http(s):// in your servers address')
47
+
48
+
49
+ PY3_STR_DECODER = lambda x: decodeBytesToUnicodeConditional(x, condition=PY3)
50
+
51
+
52
+ class Document(dict):
53
+ """
54
+ Document class is the instantiation of one document in the CouchDB
55
+ """
56
+
57
+ def __init__(self, id=None, inputDict=None):
58
+ """
59
+ Initialise our Document object - a dictionary which has an id field
60
+ inputDict - input dictionary to initialise this instance
61
+ """
62
+ inputDict = inputDict or {}
63
+ dict.__init__(self)
64
+ self.update(inputDict)
65
+ if id:
66
+ self.setdefault("_id", id)
67
+
68
+ def delete(self):
69
+ """
70
+ Mark the document as deleted
71
+ """
72
+ # https://issues.apache.org/jira/browse/COUCHDB-1141
73
+ deletedDict = {'_id': self['_id'], '_rev': self['_rev'], '_deleted': True}
74
+ self.update(deletedDict)
75
+ for key in list(self.keys()):
76
+ if key not in deletedDict:
77
+ del self[key]
78
+
79
+ def __to_json__(self, thunker):
80
+ """
81
+ __to_json__
82
+
83
+ This is here to prevent the serializer from attempting to serialize
84
+ this object and adding a bunch of keys that couch won't understand.
85
+ """
86
+ jsonDict = {}
87
+ for key in self.keys():
88
+ jsonDict[key] = self[key]
89
+
90
+ return jsonDict
91
+
92
+
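# Usage sketch (illustrative only, not part of the packaged module): a Document
# is a plain dict carrying an "_id", and delete() strips it down to the fields
# CouchDB needs to record a deletion. The id, fields and revision below are
# hypothetical.
def _example_document_usage():
    doc = Document(id="request_12345", inputDict={"state": "acquired"})
    doc["priority"] = 100000
    doc["_rev"] = "1-abc123"   # normally assigned by CouchDB after a commit
    doc.delete()               # now only _id, _rev and _deleted=True remain
    return doc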
93
+ class CouchDBRequests(JSONRequests):
94
+ """
95
+ CouchDB has two non-standard HTTP calls, implement them here for
96
+ completeness, and talks to the CouchDB port
97
+ """
98
+
99
+ def __init__(self, url='http://localhost:5984', usePYCurl=True, ckey=None, cert=None, capath=None):
100
+ """
101
+ Initialise requests
102
+ """
103
+ JSONRequests.__init__(self, url,
104
+ {"cachepath": None, "pycurl": usePYCurl, "key": ckey, "cert": cert, "capath": capath})
105
+ self.accept_type = "application/json"
106
+ self["timeout"] = 600
107
+
108
+ def move(self, uri=None, data=None):
109
+ """
110
+ MOVE some data
111
+ """
112
+ return self.makeRequest(uri, data, 'MOVE')
113
+
114
+ def copy(self, uri=None, data=None):
115
+ """
116
+ COPY some data
117
+ """
118
+ return self.makeRequest(uri, data, 'COPY')
119
+
120
+ def makeRequest(self, uri=None, data=None, type='GET', incoming_headers=None,
121
+ encode=True, decode=True, contentType=None, cache=False):
122
+ """
123
+ Make the request, handle any failed status, return just the data (for
124
+ compatibility). By default do not cache the response.
125
+
126
+ TODO: set caching in the calling methods.
127
+ """
128
+ incoming_headers = incoming_headers or {}
129
+ incoming_headers.update(self.additionalHeaders)
130
+ try:
131
+ if not cache:
132
+ incoming_headers.update({'Cache-Control': 'no-cache'})
133
+ result, status, reason, cached = JSONRequests.makeRequest(
134
+ self, uri, data, type, incoming_headers,
135
+ encode, decode, contentType)
136
+ except HTTPException as e:
137
+ self.checkForCouchError(getattr(e, "status", None),
138
+ getattr(e, "reason", None),
139
+ data,
140
+ getattr(e, "result", None))
141
+
142
+ return result
143
+
144
+ def checkForCouchError(self, status, reason, data=None, result=None):
145
+ """
146
+ _checkForCouchError_
147
+
148
+ Check the HTTP status and raise an appropriate exception.
149
+ """
150
+ if status == 400:
151
+ raise CouchBadRequestError(reason, data, result, status)
152
+ elif status == 401:
153
+ raise CouchUnauthorisedError(reason, data, result, status)
154
+ elif status == 403:
155
+ raise CouchForbidden(reason, data, result, status)
156
+ elif status == 404:
157
+ raise CouchNotFoundError(reason, data, result, status)
158
+ elif status == 405:
159
+ raise CouchNotAllowedError(reason, data, result, status)
160
+ elif status == 406:
161
+ raise CouchNotAcceptableError(reason, data, result, status)
162
+ elif status == 409:
163
+ raise CouchConflictError(reason, data, result, status)
164
+ elif status == 410:
165
+ raise CouchFeatureGone(reason, data, result, status)
166
+ elif status == 412:
167
+ raise CouchPreconditionFailedError(reason, data, result, status)
168
+ elif status == 413:
169
+ raise CouchRequestTooLargeError(reason, data, result, status)
170
+ elif status == 416:
171
+ raise CouchRequestedRangeNotSatisfiableError(reason, data, result, status)
172
+ elif status == 417:
173
+ raise CouchExpectationFailedError(reason, data, result, status)
174
+ elif status == 500:
175
+ raise CouchInternalServerError(reason, data, result, status)
176
+ elif status in [502, 503, 504]:
177
+ # There are HTTP errors that CouchDB doesn't raise but can appear
178
+ # in our environment, e.g. behind a proxy. Raise a generic CouchError instead.
179
+ raise CouchError(reason, data, result, status)
180
+ else:
181
+ # Unexpected status code, raise a generic CouchError
182
+ raise CouchError(reason, data, result, status)
183
+
184
+
185
+
186
+ class Database(CouchDBRequests):
187
+ """
188
+ Object representing a connection to a CouchDB Database instance.
189
+ TODO: implement COPY and MOVE calls.
190
+ TODO: remove leading whitespace when committing a view
191
+ """
192
+
193
+ def __init__(self, dbname='database', url='http://localhost:5984', size=1000, ckey=None, cert=None):
194
+ """
195
+ A set of queries against a CouchDB database
196
+ """
197
+ check_name(dbname)
198
+
199
+ self.name = urllib.parse.quote_plus(dbname)
200
+
201
+ CouchDBRequests.__init__(self, url=url, ckey=ckey, cert=cert)
202
+ self._reset_queue()
203
+
204
+ self._queue_size = size
205
+ self.threads = []
206
+ self.last_seq = 0
207
+
208
+ def _reset_queue(self):
209
+ """
210
+ Set the queue to an empty list, e.g. after a commit
211
+ """
212
+ self._queue = []
213
+
214
+ def timestamp(self, data, label=''):
215
+ """
216
+ Time stamp each doc in a list
217
+ """
218
+ if label is True:
219
+ label = 'timestamp'
220
+
221
+ if isinstance(data, type({})):
222
+ data[label] = int(time.time())
223
+ else:
224
+ for doc in data:
225
+ if label not in doc:
226
+ doc[label] = int(time.time())
227
+ return data
228
+
229
+ def getQueueSize(self):
230
+ """
231
+ Return the current size of the queue, i.e., how
232
+ many documents are already queued up
233
+ """
234
+ return len(self._queue)
235
+
236
+ def queue(self, doc, timestamp=False, viewlist=None, callback=None):
237
+ """
238
+ Queue up a doc for bulk insert. If timestamp = True add a timestamp
239
+ field if one doesn't exist. Use this over commit(timestamp=True) if you
240
+ want to timestamp when a document was added to the queue instead of when
241
+ it was committed
242
+ If a callback is specified then pass it to the commit function if a
243
+ commit is triggered
244
+ """
245
+ viewlist = viewlist or []
246
+ if timestamp:
247
+ self.timestamp(doc, timestamp)
248
+ # TODO: Thread this off so that it's non blocking...
249
+ if self.getQueueSize() >= self._queue_size:
250
+ logging.warning('queue larger than %s records, committing', self._queue_size)
251
+ self.commit(viewlist=viewlist, callback=callback)
252
+ self._queue.append(doc)
253
+
254
+ def queueDelete(self, doc):
255
+ """
256
+ Queue up a document for deletion
257
+ """
258
+ assert isinstance(doc, type({})), "document not a dictionary"
259
+ # https://issues.apache.org/jira/browse/COUCHDB-1141
260
+ doc = {'_id': doc['_id'], '_rev': doc['_rev'], '_deleted': True}
261
+ self.queue(doc)
262
+
263
+ def commitOne(self, doc, timestamp=False, viewlist=None):
264
+ """
265
+ Helper function for when you know you only want to insert one doc
266
+ additionally keeps from having to rewrite ConfigCache to handle the
267
+ new commit function's semantics
268
+ """
269
+ viewlist = viewlist or []
270
+ uri = '/%s/_bulk_docs/' % self.name
271
+ if timestamp:
272
+ self.timestamp(doc, timestamp)
273
+
274
+ data = {'docs': [doc]}
275
+ retval = self.post(uri, data)
276
+ for v in viewlist:
277
+ design, view = v.split('/')
278
+ self.loadView(design, view, {'limit': 0})
279
+ return retval
280
+
281
+ def commit(self, doc=None, returndocs=False, timestamp=False,
282
+ viewlist=None, callback=None, **data):
283
+ """
284
+ Add doc and/or the contents of self._queue to the database.
285
+ If timestamp is true timestamp all documents with a unix style
286
+ timestamp - this will be the timestamp of when the commit was called, it
287
+ will not override an existing timestamp field. If timestamp is a string
288
+ that string will be used as the label for the timestamp.
289
+
290
+ The callback function will be called with the documents that trigger a
291
+ conflict when doing the bulk post of the documents in the queue,
292
+ callback functions must accept the database object, the data posted and a row in the
293
+ result from the bulk commit. The callback updates the retval with
294
+ its internal retval
295
+
296
+ key, value pairs can be used to pass extra parameters to the bulk doc api
297
+ See https://docs.couchdb.org/en/latest/api/database/bulk-api.html#db-bulk-docs
298
+
299
+ TODO: restore support for returndocs and viewlist
300
+
301
+ Returns a list of good documents
302
+ throws an exception otherwise
303
+ """
304
+ viewlist = viewlist or []
305
+ if doc:
306
+ self.queue(doc, timestamp, viewlist)
307
+
308
+ if not self._queue:
309
+ return
310
+
311
+ if timestamp:
312
+ self.timestamp(self._queue, timestamp)
313
+ # TODO: commit in thread to avoid blocking others
314
+ uri = '/%s/_bulk_docs/' % self.name
315
+
316
+ data['docs'] = list(self._queue)
317
+ retval = self.post(uri, data)
318
+ self._reset_queue()
319
+ for v in viewlist:
320
+ design, view = v.split('/')
321
+ self.loadView(design, view, {'limit': 0})
322
+ if callback:
323
+ for idx, result in enumerate(retval):
324
+ if result.get('error', None) == 'conflict':
325
+ retval[idx] = callback(self, data, result)
326
+
327
+ return retval
328
+
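# Usage sketch (illustrative only, not part of the packaged module): queueing
# documents and committing them with a conflict callback, following the
# semantics described in the commit() docstring. The database name, document
# ids and the "keep the stored revision" policy are assumptions.
def _example_bulk_commit():
    db = Database(dbname="workqueue_example", url="http://localhost:5984")

    def keepExisting(database, data, row):
        # callback signature: (database, posted data, conflicting result row)
        logging.warning("Conflict while committing doc %s", row.get("id"))
        return row

    for num in range(5):
        db.queue({"_id": "element_%d" % num, "Status": "Available"}, timestamp=True)
    return db.commit(callback=keepExisting)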
329
+ def document(self, id, rev=None):
330
+ """
331
+ Load a document identified by id. You can specify a rev to see an older revision
332
+ of the document. This **should only** be used when resolving conflicts, relying
333
+ on CouchDB revisions for document history is not safe, as any compaction will
334
+ remove the older revisions.
335
+ """
336
+ uri = '/%s/%s' % (self.name, urllib.parse.quote_plus(id))
337
+ if rev:
338
+ uri += '?' + urllib.parse.urlencode({'rev': rev})
339
+ return Document(id=id, inputDict=self.get(uri))
340
+
341
+ def updateDocument(self, doc_id, design, update_func, fields=None, useBody=False):
342
+ """
343
+ Call the update function update_func defined in the design document
344
+ design for the document doc_id with a query string built from fields.
345
+
346
+ http://wiki.apache.org/couchdb/Document_Update_Handlers
347
+ """
348
+ fields = fields or {}
349
+ # Clean up /'s in the name etc.
350
+ doc_id = urllib.parse.quote_plus(doc_id)
351
+
352
+ if not useBody:
353
+ updateUri = '/%s/_design/%s/_update/%s/%s?%s' % \
354
+ (self.name, design, update_func, doc_id, urllib.parse.urlencode(fields))
355
+
356
+ return self.put(uri=updateUri, decode=PY3_STR_DECODER)
357
+ else:
358
+ updateUri = '/%s/_design/%s/_update/%s/%s' % \
359
+ (self.name, design, update_func, doc_id)
360
+ return self.put(uri=updateUri, data=fields, decode=PY3_STR_DECODER)
361
+
362
+ def updateBulkDocuments(self, doc_ids, paramsToUpdate, updateLimits=1000):
363
+
364
+ uri = '/%s/_bulk_docs/' % self.name
365
+ conflictDocIDs = []
366
+ for ids in grouper(doc_ids, updateLimits):
367
+ # get original documents
368
+ docs = self.allDocs(options={"include_docs": True}, keys=ids)['rows']
369
+ data = {}
370
+ data['docs'] = []
371
+ for j in docs:
372
+ doc = {}
373
+ doc.update(j['doc'])
374
+ nestedDictUpdate(doc, paramsToUpdate)
375
+ data['docs'].append(doc)
376
+
377
+ if data['docs']:
378
+ retval = self.post(uri, data)
379
+ for result in retval:
380
+ if result.get('error', None) == 'conflict':
381
+ conflictDocIDs.append(result['id'])
382
+
383
+ return conflictDocIDs
384
+
385
+ def updateBulkDocumentsWithConflictHandle(self, doc_ids, updateParams, updateLimits=1000, maxConflictLimit=10):
386
+ """
387
+ param: doc_ids: list of couch doc ids to update; it should not contain duplicates or empty strings
388
+ param: updateParams: dictionary of parameters to be updated.
389
+ param: updateLimits: number of documents in one commit
390
+ param: maxConflictLimit: number of conflict-fix attempts before giving up, to prevent unbounded recursion
391
+ """
392
+ conflictDocIDs = self.updateBulkDocuments(doc_ids, updateParams, updateLimits)
393
+ if conflictDocIDs:
394
+ # wait a second before trying again for the conflicting documents
395
+ if maxConflictLimit == 0:
396
+ return conflictDocIDs
397
+ time.sleep(1)
398
+ self.updateBulkDocumentsWithConflictHandle(conflictDocIDs, updateParams,
399
+ maxConflictLimit=maxConflictLimit - 1)
400
+ return []
401
+
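# Usage sketch (illustrative only, not part of the packaged module): pushing the
# same field updates to many documents and retrying the ones that conflict. The
# document ids and parameters are hypothetical.
def _example_bulk_update(db):
    docIds = ["element_1", "element_2", "element_3"]
    newParams = {"Status": "Canceled", "Priority": 0}
    # returns the ids that still conflict after maxConflictLimit retries
    return db.updateBulkDocumentsWithConflictHandle(docIds, newParams, updateLimits=100)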
402
+ def putDocument(self, doc_id, fields):
403
+ """
404
+ Store (PUT) the document doc_id directly in the database, using fields
405
+ as the document body.
406
+
407
+ http://wiki.apache.org/couchdb/HTTP_Document_API
408
+ """
409
+ # Clean up /'s in the name etc.
410
+ doc_id = urllib.parse.quote_plus(doc_id)
411
+
412
+ updateUri = '/%s/%s' % (self.name, doc_id)
413
+ return self.put(uri=updateUri, data=fields, decode=PY3_STR_DECODER)
414
+
415
+ def documentExists(self, id, rev=None):
416
+ """
417
+ Check if a document exists by ID. If specified check that the revision rev exists.
418
+ """
419
+ uri = "/%s/%s" % (self.name, urllib.parse.quote_plus(id))
420
+ if rev:
421
+ uri += '?' + urllib.parse.urlencode({'rev': rev})
422
+ try:
423
+ self.makeRequest(uri, {}, 'HEAD')
424
+ return True
425
+ except CouchNotFoundError:
426
+ return False
427
+
428
+ def delete_doc(self, id, rev=None):
429
+ """
430
+ Immediately delete a document identified by id and rev.
431
+ If revision is not provided, we need to first fetch this
432
+ document to read the current revision number.
433
+
434
+ :param id: string with the document name
435
+ :param rev: string with the revision number
436
+ :return: an empty dictionary if it fails to fetch the document,
437
+ or a dictionary with the deletion outcome, e.g.:
438
+ {'ok': True, 'id': 'doc_name', 'rev': '3-f68156d'}
439
+ """
440
+ uri = '/%s/%s' % (self.name, urllib.parse.quote_plus(id))
441
+ if not rev:
442
+ # then we need to fetch the latest revision number
443
+ doc = self.getDoc(id)
444
+ if "_rev" not in doc:
445
+ logging.warning("Failed to retrieve doc id: %s for deletion.", id)
446
+ return doc
447
+ rev = doc["_rev"]
448
+ uri += '?' + urllib.parse.urlencode({'rev': rev})
449
+ return self.delete(uri)
450
+
451
+ def compact(self, views=None, blocking=False, blocking_poll=5, callback=False):
452
+ """
453
+ Compact the database: http://wiki.apache.org/couchdb/Compaction
454
+
455
+ If given, views should be a list of design document name (minus the
456
+ _design/ - e.g. myviews not _design/myviews). For each view in the list
457
+ view compaction will be triggered. Also, if the views list is provided
458
+ _view_cleanup is called to remove old view output.
459
+
460
+ If True blocking will cause this call to wait until the compaction is
461
+ completed, polling for status with frequency blocking_poll and calling
462
+ the function specified by callback on each iteration.
463
+
464
+ The callback function can be used for logging and could also be used to
465
+ timeout the compaction based on status (e.g. don't time out if compaction
466
+ is less than X% complete. The callback function takes the Database (self)
467
+ as an argument. If the callback function raises an exception the block is
468
+ removed and the compact call returns.
469
+ """
470
+ views = views or []
471
+ response = self.post('/%s/_compact' % self.name)
472
+ if views:
473
+ for view in views:
474
+ response[view] = self.post('/%s/_compact/%s' % (self.name, view))
475
+ response['view_cleanup'] = self.post('/%s/_view_cleanup' % (self.name))
476
+
477
+ if blocking:
478
+ while self.info()['compact_running']:
479
+ if callback:
480
+ try:
481
+ callback(self)
482
+ except Exception:
483
+ return response
484
+ time.sleep(blocking_poll)
485
+ return response
486
+
487
+ def changes(self, since=-1):
488
+ """
489
+ Get the changes since sequence number. Store the last sequence value to
490
+ self.last_seq. If the since is negative use self.last_seq.
491
+ """
492
+ if since < 0:
493
+ since = self.last_seq
494
+ data = self.get('/%s/_changes/?since=%s' % (self.name, since))
495
+ self.last_seq = data['last_seq']
496
+ return data
497
+
498
+ def changesWithFilter(self, filter, limit=1000, since=-1):
499
+ """
500
+ Get the changes since sequence number. Store the last sequence value to
501
+ self.last_seq. If the since is negative use self.last_seq.
502
+ """
503
+ if since < 0:
504
+ since = self.last_seq
505
+ data = self.get('/%s/_changes?limit=%s&since=%s&filter=%s' % (self.name, limit, since, filter))
506
+ self.last_seq = data['last_seq']
507
+ return data
508
+
509
+ def purge(self, data):
510
+ return self.post('/%s/_purge' % self.name, data)
511
+
512
+ def loadView(self, design, view, options=None, keys=None):
513
+ """
514
+ Load a view by getting, for example:
515
+ http://localhost:5984/tester/_view/viewtest/age_name?count=10&group=true
516
+
517
+ The following URL query arguments are allowed:
518
+
519
+ GET
520
+ key=keyvalue
521
+ startkey=keyvalue
522
+ startkey_docid=docid
523
+ endkey=keyvalue
524
+ endkey_docid=docid
525
+ limit=max rows to return
526
+ stale=ok
527
+ descending=true
528
+ skip=number of rows to skip
529
+ group=true Version 0.8.0 and forward
530
+ group_level=int
531
+ reduce=false Trunk only (0.9)
532
+ include_docs=true Trunk only (0.9)
533
+ POST
534
+ {"keys": ["key1", "key2", ...]} Trunk only (0.9)
535
+
536
+ more info: http://wiki.apache.org/couchdb/HTTP_view_API
537
+ """
538
+ options = options or {}
539
+ keys = keys or []
540
+ encodedOptions = {}
541
+ for k, v in viewitems(options):
542
+ # We can't encode the stale option, as it will be converted to '"ok"'
543
+ # which couch barfs on.
544
+ if k == "stale":
545
+ encodedOptions[k] = v
546
+ else:
547
+ encodedOptions[k] = self.encode(v)
548
+
549
+ if keys:
550
+ if encodedOptions:
551
+ data = urllib.parse.urlencode(encodedOptions)
552
+ retval = self.post('/%s/_design/%s/_view/%s?%s' % \
553
+ (self.name, design, view, data), {'keys': keys})
554
+ else:
555
+ retval = self.post('/%s/_design/%s/_view/%s' % \
556
+ (self.name, design, view), {'keys': keys})
557
+ else:
558
+ retval = self.get('/%s/_design/%s/_view/%s' % \
559
+ (self.name, design, view), encodedOptions)
560
+ if 'error' in retval:
561
+ raise RuntimeError("Error in CouchDB: viewError '%s' reason '%s'" % \
562
+ (retval['error'], retval['reason']))
563
+ else:
564
+ return retval
565
+
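# Usage sketch (illustrative only, not part of the packaged module): the two
# loadView() code paths, a GET with encoded query options and a POST with an
# explicit list of keys. The design document, view names and keys are
# hypothetical.
def _example_load_view(db):
    byStatus = db.loadView("WorkQueue", "elementsByStatus",
                           options={"limit": 10, "stale": "ok"})
    selected = db.loadView("WorkQueue", "elementsByWorkflow",
                           keys=["workflow_A", "workflow_B"])
    return byStatus["rows"], selected["rows"]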
566
+ def loadList(self, design, list, view, options=None, keys=None):
567
+ """
568
+ Load data from a list function. This returns data that hasn't been
569
+ decoded, since a list can return data in any format. It is expected that
570
+ the caller of this function knows what data is being returned and how to
571
+ deal with it appropriately.
572
+ """
573
+ options = options or {}
574
+ keys = keys or []
575
+ encodedOptions = {}
576
+ for k, v in viewitems(options):
577
+ encodedOptions[k] = self.encode(v)
578
+
579
+ if keys:
580
+ if encodedOptions:
581
+ data = urllib.parse.urlencode(encodedOptions)
582
+ retval = self.post('/%s/_design/%s/_list/%s/%s?%s' % \
583
+ (self.name, design, list, view, data), {'keys': keys},
584
+ decode=PY3_STR_DECODER)
585
+ else:
586
+ retval = self.post('/%s/_design/%s/_list/%s/%s' % \
587
+ (self.name, design, list, view), {'keys': keys},
588
+ decode=PY3_STR_DECODER)
589
+ else:
590
+ retval = self.get('/%s/_design/%s/_list/%s/%s' % \
591
+ (self.name, design, list, view), encodedOptions,
592
+ decode=PY3_STR_DECODER)
593
+
594
+ return retval
595
+
596
+ def getDoc(self, docName):
597
+ """
598
+ Return a single document from the database.
599
+ """
600
+ try:
601
+ return self.get('/%s/%s' % (self.name, docName))
602
+ except CouchError as e:
603
+ # if empty dict, then doc does not exist in the db
604
+ if getattr(e, "data", None) == {}:
605
+ return {}
606
+ self.checkForCouchError(getattr(e, "status", None), getattr(e, "reason", None))
607
+
608
+ def allDocs(self, options=None, keys=None):
609
+ """
610
+ Return all the documents in the database
611
+ options is a dict type parameter which can be passed to _all_docs
612
+ e.g. {'startkey': 'a', 'limit': 2, 'include_docs': True}
613
+ keys is the list of keys (ids) of the docs to be returned
614
+ """
615
+ options = options or {}
616
+ keys = keys or []
617
+ encodedOptions = {}
618
+ for k, v in viewitems(options):
619
+ encodedOptions[k] = self.encode(v)
620
+
621
+ if keys:
622
+ if encodedOptions:
623
+ data = urllib.parse.urlencode(encodedOptions)
624
+ return self.post('/%s/_all_docs?%s' % (self.name, data),
625
+ {'keys': keys})
626
+ else:
627
+ return self.post('/%s/_all_docs' % self.name,
628
+ {'keys': keys})
629
+ else:
630
+ return self.get('/%s/_all_docs' % self.name, encodedOptions)
631
+
632
+ def info(self):
633
+ """
634
+ Return information about the database (size, number of documents, etc).
635
+ """
636
+ return self.get('/%s/' % self.name)
637
+
638
+ def addAttachment(self, id, rev, value, name=None, contentType=None, checksum=None, add_checksum=False):
639
+ """
640
+ Add an attachment stored in value to a document identified by id at revision rev.
641
+ If specified, the attachment will be uploaded as name, otherwise the attachment is
642
+ named "attachment".
643
+
644
+ If not set CouchDB will try to determine contentType and default to text/plain.
645
+
646
+ If checksum is specified pass this to CouchDB, it will refuse if the MD5 checksum
647
+ doesn't match the one provided. If add_checksum is True calculate the checksum of
648
+ the attachment and pass that into CouchDB for validation. The checksum should be the
649
+ base64 encoded binary md5 (as returned by hashlib.md5().digest())
650
+ """
651
+ if name is None:
652
+ name = "attachment"
653
+ req_headers = {}
654
+
655
+ if add_checksum:
656
+ # calculate base64 encoded MD5
657
+ keyhash = hashlib.md5()
658
+ value_str = str(value) if not isinstance(value, (newstr, newbytes)) else value
659
+ keyhash.update(encodeUnicodeToBytes(value_str))
660
+ content_md5 = base64.b64encode(keyhash.digest())
661
+ req_headers['Content-MD5'] = decodeBytesToUnicode(content_md5) if PY3 else content_md5
662
+ elif checksum:
663
+ req_headers['Content-MD5'] = decodeBytesToUnicode(checksum) if PY3 else checksum
664
+ return self.put('/%s/%s/%s?rev=%s' % (self.name, id, name, rev),
665
+ value, encode=False,
666
+ contentType=contentType,
667
+ incoming_headers=req_headers)
668
+
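# Usage sketch (illustrative only, not part of the packaged module): uploading
# an attachment with an explicit Content-MD5 checksum, i.e. the base64-encoded
# binary MD5 digest mentioned in the addAttachment() docstring. The document id
# and revision are hypothetical.
def _example_add_attachment(db, docId, docRev):
    payload = b"some framework job report"
    checksum = base64.b64encode(hashlib.md5(payload).digest())
    return db.addAttachment(docId, docRev, payload, name="report.txt",
                            contentType="text/plain", checksum=checksum)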
669
+ def getAttachment(self, id, name="attachment"):
670
+ """
671
+ _getAttachment_
672
+
673
+ Retrieve an attachment for a couch document.
674
+ """
675
+ url = "/%s/%s/%s" % (self.name, id, name)
676
+ attachment = self.get(url, None, encode=False, decode=PY3_STR_DECODER)
677
+
678
+ # there has to be a better way to do this but if we're not de-jsoning
679
+ # the return values, then this is all I can do for error checking,
680
+ # right?
681
+ # TODO: MAKE BETTER ERROR HANDLING
682
+ if (attachment.find('{"error":"not_found","reason":"deleted"}') != -1):
683
+ raise RuntimeError("File not found, deleted")
684
+ if id == "nonexistantid":
685
+ print(attachment)
686
+ return attachment
687
+
688
+ def bulkDeleteByIDs(self, ids):
689
+ """
690
+ delete bulk documents
691
+ """
692
+ # do the safety check, otherwise it will delete the whole db.
693
+ if not isinstance(ids, list):
694
+ raise RuntimeError("Bulk delete requires a list of ids, wrong data type")
695
+ if not ids:
696
+ return None
697
+
698
+ docs = self.allDocs(keys=ids)['rows']
699
+ for j in docs:
700
+ doc = {}
701
+ if "id" not in j:
702
+ print("Document not found: %s" % j)
703
+ continue
704
+ doc["_id"] = j['id']
705
+ doc["_rev"] = j['value']['rev']
706
+ self.queueDelete(doc)
707
+ return self.commit()
708
+
709
+
710
+ class RotatingDatabase(Database):
711
+ """
712
+ A rotating database is actually multiple databases:
713
+ - one active database (self)
714
+ - N inactive databases (waiting to be removed)
715
+ - one archive database
716
+ - one configuration/seed database
717
+
718
+ The active database is the one which serves current requests. It is active
719
+ for a certain time window and then archived and marked as inactive.
720
+
721
+ Inactive databases no longer receive queries, although they are still available
722
+ on the server. They are queued up for deletion. This allows you to have a
723
+ system where active databases are rotated daily and are kept in the server
724
+ for a week. Inactive databases have a document in them defined as:
725
+ {
726
+ '_id': 'inactive',
727
+ 'archived_at': TIMESTAMP, # added when archived
728
+ 'expires_at': TIMESTAMP+delta # added when archived
729
+ }
730
+ which is used to persist state across instantiations of the class.
731
+
732
+ The archive database stores the results of views on the active databases
733
+ once they are rotated out of service.
734
+
735
+ The configuration/seed database holds the following information:
736
+ * names of known inactive databases
737
+ * name of current active database
738
+ * name of archive database
739
+ * design documents needed to seed new databases
740
+
741
+ Once rotated the current active database is made inactive, a new active
742
+ database created, views are copied to the archive database as necessary and
743
+ the inactive databases queued for removal.
744
+ """
745
+
746
+ def __init__(self, dbname='database', url='http://localhost:5984',
747
+ size=1000, archivename=None, seedname=None,
748
+ timing=None, views=None):
749
+ """
750
+ dbname: base name for databases; active databases will have
751
+ timestamp appended
752
+ url: url of the CouchDB server
753
+ size: how big the data queue can get
754
+ archivename: database to archive view results to, default is
755
+ dbname_archive
756
+ seedname: database where seed views and configuration/state are held
757
+ default is $dbname_seedcfg
758
+ timing: a dict containing two timedeltas 'archive' and 'expire',
759
+ if not present assume the database will be rotated by
760
+ external code
761
+ views: a list of views (design/name) to archive. The assumption
762
+ is that these views have been loaded into the seed
763
+ database via couchapp or some other process.
764
+ """
765
+ views = views or []
766
+ # Store the base database name
767
+ self.basename = dbname
768
+
769
+ # Since we're going to be making databases, hold onto a server
770
+ self.server = CouchServer(url)
771
+
772
+ # self is the "active" database
773
+ Database.__init__(self, self._get_new_name(), url, size)
774
+ # forcibly make sure I exist
775
+ self.server.connectDatabase(self.name)
776
+
777
+ # Set up the databases for the seed
778
+ if not seedname:
779
+ seedname = '%s_seedcfg' % (self.basename)
780
+ self.seed_db = self.server.connectDatabase(seedname, url, size)
781
+
782
+ # TODO: load a rotating DB from the seed db
783
+
784
+ # TODO: Maybe call self._rotate() here?
785
+
786
+ self.timing = timing
787
+
788
+ self.archive_config = {}
789
+ self.archive_db = None
790
+ self.views = []
791
+ if views:
792
+ # If views isn't set in the constructor, there's nothing to archive
793
+ if not archivename:
794
+ archivename = '%s_archive' % (self.basename)
795
+ # TODO: check that the views listed exist in the seed
796
+ # TODO: support passing in view options
797
+ self.views = views
798
+ self.archive_db = self.server.connectDatabase(archivename, url, size)
799
+ self.archive_config['views'] = self.views
800
+ self.archive_config['database'] = archivename
801
+ self.archive_config['type'] = 'archive_config'
802
+ self.archive_config['timing'] = str(self.timing)
803
+ # copy views from the seed to the active db
804
+ self._copy_views()
805
+ if self.archive_config:
806
+ # TODO: deal with multiple instances, load from doc?
807
+ self.seed_db.commitOne(self.archive_config)
808
+
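# Usage sketch (illustrative only, not part of the packaged module): a rotating
# database that archives one view daily and expires retired databases after a
# week, following the timing/views conventions in the constructor docstring.
# The database name and the design/view name are hypothetical and must already
# exist in the seed database.
def _example_rotating_database():
    from datetime import timedelta
    timing = {"archive": timedelta(days=1), "expire": timedelta(days=7)}
    return RotatingDatabase(dbname="wq_metrics", url="http://localhost:5984",
                            timing=timing, views=["WorkQueue/jobsByStatus"])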
809
+ def _get_new_name(self):
810
+ return '%s_%s' % (self.basename, int(time.time()))
811
+
812
+ def _copy_views(self):
813
+ """
814
+ Copy design documents from self.seed_db to the new active database.
815
+ This means that all views in the design doc are copied, regardless of
816
+ whether they are actually archived.
817
+ """
818
+ for design_to_copy in set(['_design/%s' % design.split('/')[0] for design in self.views]):
819
+ design = self.seed_db.document(design_to_copy)
820
+ del design['_rev']
821
+ self.queue(design)
822
+ self.commit()
823
+
824
+ def _rotate(self):
825
+ """
826
+ Rotate the active database:
827
+ 1. create the new active database
828
+ 2. set self.name to the new database name
829
+ 3. write the inactive document to the old active database
830
+ 4. write the inactive document to the seed db
831
+ """
832
+ retiring_db = self.server.connectDatabase(self.name)
833
+ # do the switcheroo
834
+ new_active_db = self.server.connectDatabase(self._get_new_name())
835
+ self.name = new_active_db.name
836
+ self._copy_views()
837
+ # "connect" to the old server, write inactive doc
838
+ retiring_db.commitOne({'_id': 'inactive'}, timestamp=True)
839
+
840
+ # record new inactive db to config
841
+ # TODO: update function?
842
+
843
+ state_doc = {'_id': retiring_db.name, 'rotate_state': 'inactive'}
844
+ if not self.archive_config:
845
+ # Not configured to archive anything, so skip inactive state
846
+ # set the old db as archived instead
847
+ state_doc['rotate_state'] = 'archived'
848
+ self.seed_db.commitOne(state_doc, timestamp=True)
849
+
850
+ def _archive(self):
851
+ """
852
+ Archive inactive databases
853
+ """
854
+ if self.archive_config:
855
+ # TODO: This should be a worker thread/pool thingy so it's non-blocking
856
+ for inactive_db in self.inactive_dbs():
857
+ archiving_db = Database(inactive_db, self['host'])
858
+ for view_to_archive in self.views:
859
+ # TODO: improve handling views and options here
860
+ design, view = view_to_archive.split('/')
861
+ for data in archiving_db.loadView(design, view, options={'group': True})['rows']:
862
+ self.archive_db.queue(data)
863
+ self.archive_db.commit()
864
+ # Now set the inactive view to archived
865
+ db_state = self.seed_db.document(inactive_db)
866
+ db_state['rotate_state'] = 'archived'
867
+ self.seed_db.commit(db_state)
868
+
869
+ def _expire(self):
870
+ """
871
+ Delete inactive databases that have expired, and remove state docs.
872
+ """
873
+ now = datetime.now()
874
+ then = now - self.timing['expire']
875
+
876
+ options = {'startkey': 0, 'endkey': int(time.mktime(then.timetuple()))}
877
+ expired = self._find_dbs_in_state('archived', options)
878
+ for db in expired:
879
+ try:
880
+ self.server.deleteDatabase(db['id'])
881
+ except CouchNotFoundError:
882
+ # if it's gone we don't care
883
+ pass
884
+ db_state = self.seed_db.document(db['id'])
885
+ self.seed_db.queueDelete(db_state)
886
+ self.seed_db.commit()
887
+
888
+ def _create_design_doc(self):
889
+ """Create a design doc with a view for the rotate state"""
890
+ tempDesignDoc = {'views': {
891
+ 'rotateState': {
892
+ 'map': "function(doc) {emit(doc.timestamp, doc.rotate_state, doc._id);}"
893
+ },
894
+ }
895
+ }
896
+ self.seed_db.put('/%s/_design/TempDesignDoc' % self.seed_db.name, tempDesignDoc)
897
+
898
+ def _find_dbs_in_state(self, state, options=None):
899
+ """Creates a design document with a single (temporary) view in it"""
900
+ options = options or {}
901
+ if self.seed_db.documentExists("_design/TempDesignDoc"):
902
+ logging.info("Skipping designDoc creation because it already exists!")
903
+ else:
904
+ self._create_design_doc()
905
+
906
+ data = self.seed_db.loadView("TempDesignDoc", "rotateState", options=options)
907
+ return data['rows']
908
+
909
+ def inactive_dbs(self):
910
+ """
911
+ Return a list of inactive databases
912
+ """
913
+ return [doc['value'] for doc in self._find_dbs_in_state('inactive')]
914
+
915
+ def archived_dbs(self):
916
+ """
917
+ Return a list of archived databases
918
+ """
919
+ return [doc['value'] for doc in self._find_dbs_in_state('archived')]
920
+
921
+ def makeRequest(self, uri=None, data=None, type='GET', incoming_headers=None,
922
+ encode=True, decode=True, contentType=None,
923
+ cache=False, rotate=True):
924
+ """
925
+ Intercept the request, determine if I need to rotate, then carry out the
926
+ request as normal.
927
+ """
928
+ incoming_headers = incoming_headers or {}
929
+ if self.timing and rotate:
930
+
931
+ # check to see whether I should rotate the database before processing the request
932
+ db_age = datetime.fromtimestamp(float(self.name.split('_')[-1]))
933
+ db_expires = db_age + self.timing['archive']
934
+ if datetime.now() > db_expires:
935
+ # save the current name for later
936
+ old_db = self.name
937
+ if self._queue:
938
+ # data I've got queued up should go to the old database
939
+ # can't call self.commit() due to recursion
940
+ uri = '/%s/_bulk_docs/' % self.name
941
+ data['docs'] = list(self._queue)
942
+ self.makeRequest(uri, data, 'POST', rotate=False)
943
+ self._reset_queue()
944
+ self._rotate() # make the new database
945
+ # The uri passed in will be wrong, and the db may no longer exist if it has expired
946
+ # so replace the old name with the new
947
+ uri = uri.replace(old_db, self.name, 1)
948
+ # write the data to the current database
949
+ Database.makeRequest(self, uri, data, type, incoming_headers, encode, decode, contentType, cache)
950
+ # now do some maintenance on the archived/expired databases
951
+ self._archive()
952
+ self._expire()
953
+
954
+
+class CouchServer(CouchDBRequests):
+    """
+    An object representing the CouchDB server, use it to list, create, delete
+    and connect to databases.
+
+    More info http://wiki.apache.org/couchdb/HTTP_database_API
+    """
+
+    def __init__(self, dburl='http://localhost:5984', usePYCurl=True, ckey=None, cert=None, capath=None):
+        """
+        Set up a connection to the CouchDB server
+        """
+        check_server_url(dburl)
+        CouchDBRequests.__init__(self, url=dburl, usePYCurl=usePYCurl, ckey=ckey, cert=cert, capath=capath)
+        self.url = dburl
+        self.ckey = ckey
+        self.cert = cert
+
+    def getCouchWelcome(self):
+        """
+        Retrieve CouchDB welcome information (which includes the version number)
+        :return: a dictionary
+        """
+        return self.get('')
+
+    def listDatabases(self):
+        "List all the databases the server hosts"
+        return self.get('/_all_dbs')
+
+    def createDatabase(self, dbname, size=1000):
+        """
+        A database must be named with all lowercase characters (a-z),
+        digits (0-9), or any of the _$()+-/ characters and must end with a slash
+        in the URL.
+        """
+        check_name(dbname)
+
+        self.put("/%s" % urllib.parse.quote_plus(dbname))
+        # Pass the Database constructor the unquoted name - the constructor will
+        # quote it for us.
+        return Database(dbname=dbname, url=self.url, size=size, ckey=self.ckey, cert=self.cert)
+
+    def deleteDatabase(self, dbname):
+        """Delete a database from the server"""
+        check_name(dbname)
+        dbname = urllib.parse.quote_plus(dbname)
+        if "cmsweb" in self.url:
+            msg = "You can't be serious that you want to delete a PRODUCTION database!!! "
+            msg += f"At url: {self.url}, for database name: {dbname}. Bailing out!"
+            raise RuntimeError(msg)
+        return self.delete("/%s" % dbname)
+
+    def connectDatabase(self, dbname='database', create=True, size=1000):
+        """
+        Return a Database instance pointing to a database on the server. If the
+        database doesn't exist and create is True, create it first.
+        """
+        check_name(dbname)
+        if create and dbname not in self.listDatabases():
+            return self.createDatabase(dbname, size)
+        return Database(dbname=dbname, url=self.url, size=size, ckey=self.ckey, cert=self.cert)
+
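A minimal usage sketch of the class above (not part of the module itself), assuming a CouchDB instance is reachable on localhost and that 'scratchpad' is a throwaway database name:

    server = CouchServer('http://localhost:5984')
    print(server.getCouchWelcome())            # welcome dict, includes the CouchDB version
    print(server.listDatabases())

    db = server.connectDatabase('scratchpad')  # created on demand because create=True
    print(db.name)

    server.deleteDatabase('scratchpad')        # refused outright if the server URL contains "cmsweb"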
+    def replicate(self, source, destination, continuous=False,
+                  create_target=False, cancel=False, doc_ids=False,
+                  filter=False, query_params=False, sleepSecs=0, selector=False):
+        """
+        Trigger replication between source and destination. CouchDB options are
+        defined in: https://docs.couchdb.org/en/3.1.2/api/server/common.html#replicate
+        with further details in: https://docs.couchdb.org/en/stable/replication/replicator.html
+
+        Source and destination need to be appropriately urlquoted after the port
+        number. E.g. if you have a database with /'s in the name you need to
+        convert them into %2F's.
+
+        TODO: Improve source/destination handling - can't simply URL quote,
+        though, would need to decompose the URL and rebuild it.
+
+        :param source: string with the source url to replicate data from
+        :param destination: string with the destination url to replicate data to
+        :param continuous: boolean to perform a continuous replication or not
+        :param create_target: boolean to create the target database, if non-existent
+        :param cancel: boolean to stop a replication (but we better just delete the doc!)
+        :param doc_ids: a list of specific doc ids that we would like to replicate
+        :param filter: string with the name of the filter function to be used. Note that
+            this filter is expected to have been defined in the design doc.
+        :param query_params: dictionary of parameters to pass over to the filter function
+        :param sleepSecs: amount of seconds to sleep after the replication job is created
+        :param selector: a JSON selector object, the newer alternative to filter functions
+            (evaluated natively by CouchDB)
+        :return: status of the replication creation
+        """
+        listDbs = self.listDatabases()
+        if source not in listDbs:
+            check_server_url(source)
+        if destination not in listDbs:
+            if create_target and not destination.startswith("http"):
+                check_name(destination)
+            else:
+                check_server_url(destination)
+
+        if not destination.startswith("http"):
+            destination = '%s/%s' % (self.url, destination)
+        if not source.startswith("http"):
+            source = '%s/%s' % (self.url, source)
+        data = {"source": source, "target": destination}
+        # There must be a nicer way to do this, but I've not had coffee yet...
+        if continuous: data["continuous"] = continuous
+        if create_target: data["create_target"] = create_target
+        if cancel: data["cancel"] = cancel
+        if doc_ids: data["doc_ids"] = doc_ids
+        if filter:
+            data["filter"] = filter
+            if query_params:
+                data["query_params"] = query_params
+        if selector: data["selector"] = selector
+
+        resp = self.post('/_replicator', data)
+        # Sleep required for CouchDB 3.x unit tests
+        time.sleep(sleepSecs)
+        return resp
+
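A hedged example of setting up a filtered, continuous replication with the method above. The database names, filter name and query parameter are placeholders, and the filter is assumed to already exist in the source database's design doc:

    server = CouchServer('http://localhost:5984')
    resp = server.replicate(source='workqueue',
                            destination='workqueue_backup',
                            continuous=True,
                            create_target=True,
                            filter='WorkQueue/queueFilter',   # hypothetical designdoc/filter name
                            query_params={'childUrl': 'https://example.org/couchdb'},
                            sleepSecs=2)
    print(resp)   # the _replicator POST response, e.g. {'ok': True, 'id': ..., 'rev': ...}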
+    def status(self):
+        """
+        See what active tasks are running on the server.
+        """
+        return {'databases': self.listDatabases(),
+                'server_stats': self.get('/_stats'),
+                'active_tasks': self.get('/_active_tasks')}
+
+    def __str__(self):
+        """
+        List all the databases the server has
+        """
+        return str(self.listDatabases())
+
+
+# define some standard couch error classes
+# from:
+# http://wiki.apache.org/couchdb/HTTP_status_list
+
+class CouchError(Exception):
+    "An error thrown by CouchDB"
+
+    def __init__(self, reason, data, result, status=None):
+        Exception.__init__(self)
+        self.reason = reason
+        self.data = data
+        self.result = result
+        self.type = "CouchError"
+        self.status = status
+
+    def __str__(self):
+        """Stringify the error"""
+        errorMsg = ""
+        if self.type == "CouchError":
+            errorMsg += "A NEW COUCHDB ERROR TYPE/STATUS HAS BEEN FOUND! "
+            errorMsg += "UPDATE CMSCOUCH.PY IMPLEMENTATION WITH A NEW COUCH ERROR/STATUS! "
+            errorMsg += f"Status: {self.status}\n"
+        errorMsg += f"Error type: {self.type}, Status code: {self.status}, "
+        errorMsg += f"Reason: {self.reason}, Data: {repr(self.data)}"
+        return errorMsg
+
+
+class CouchBadRequestError(CouchError):
+    def __init__(self, reason, data, result, status):
+        CouchError.__init__(self, reason, data, result, status)
+        self.type = "CouchBadRequestError"
+
+
+class CouchUnauthorisedError(CouchError):
+    def __init__(self, reason, data, result, status):
+        CouchError.__init__(self, reason, data, result, status)
+        self.type = "CouchUnauthorisedError"
+
+
+class CouchNotFoundError(CouchError):
+    def __init__(self, reason, data, result, status):
+        CouchError.__init__(self, reason, data, result, status)
+        self.type = "CouchNotFoundError"
+
+
+class CouchNotAllowedError(CouchError):
+    def __init__(self, reason, data, result, status):
+        CouchError.__init__(self, reason, data, result, status)
+        self.type = "CouchNotAllowedError"
+
+
+class CouchNotAcceptableError(CouchError):
+    def __init__(self, reason, data, result, status):
+        CouchError.__init__(self, reason, data, result, status)
+        self.type = "CouchNotAcceptableError"
+
+
+class CouchConflictError(CouchError):
+    def __init__(self, reason, data, result, status):
+        CouchError.__init__(self, reason, data, result, status)
+        self.type = "CouchConflictError"
+
+
+class CouchFeatureGone(CouchError):
+    def __init__(self, reason, data, result, status):
+        CouchError.__init__(self, reason, data, result, status)
+        self.type = "CouchFeatureGone"
+
+
+class CouchPreconditionFailedError(CouchError):
+    def __init__(self, reason, data, result, status):
+        CouchError.__init__(self, reason, data, result, status)
+        self.type = "CouchPreconditionFailedError"
+
+
+class CouchRequestTooLargeError(CouchError):
+    def __init__(self, reason, data, result, status):
+        # calculate the size of this JSON serialized object
+        docSize = sys.getsizeof(json.dumps(data))
+        errorMsg = f"Document has {docSize} bytes and it's too large to be accepted by CouchDB. "
+        errorMsg += "Check the CouchDB configuration to see the current value "
+        errorMsg += "under 'couchdb.max_document_size' (default is 8M bytes)."
+        CouchError.__init__(self, reason, errorMsg, result, status)
+        self.type = "CouchRequestTooLargeError"
+
+
+class CouchExpectationFailedError(CouchError):
+    def __init__(self, reason, data, result, status):
+        CouchError.__init__(self, reason, data, result, status)
+        self.type = "CouchExpectationFailedError"
+
+
+class CouchRequestedRangeNotSatisfiableError(CouchError):
+    def __init__(self, reason, data, result, status):
+        CouchError.__init__(self, reason, data, result, status)
+        self.type = "CouchRequestedRangeNotSatisfiableError"
+
+
+class CouchInternalServerError(CouchError):
+    def __init__(self, reason, data, result, status):
+        CouchError.__init__(self, reason, data, result, status)
+        self.type = "CouchInternalServerError"
+
+
+class CouchForbidden(CouchError):
+    def __init__(self, reason, data, result, status):
+        CouchError.__init__(self, reason, data, result, status)
+        self.type = "CouchForbidden"
+
+
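The mapping from HTTP status codes to these exception classes happens in the request layer of this module (not shown here); callers typically just catch the subclass they care about. A small illustrative sketch, assuming a Database instance `db` and a hypothetical document id:

    try:
        doc = db.document('some-doc-id')       # hypothetical document id
    except CouchNotFoundError:
        doc = None                             # 404: treat a missing document as "no data"
    except CouchUnauthorisedError as exc:
        raise RuntimeError(f"Credentials rejected by CouchDB: {exc}") from exc
    except CouchError as exc:
        # any other status code ends up here; exc.status and exc.reason carry the details
        raise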
+class CouchMonitor(object):
+    def __init__(self, couchURL):
+        if isinstance(couchURL, CouchServer):
+            self.couchServer = couchURL
+        else:
+            self.couchServer = CouchServer(couchURL)
+
+        self.replicatorDB = self.couchServer.connectDatabase('_replicator', False)
+
+        # use the CouchDB version to decide which APIs and schema are available
+        couchInfo = self.couchServer.getCouchWelcome()
+        self.couchVersion = couchInfo.get("version")
+
+    def deleteReplicatorDocs(self, source=None, target=None, repDocs=None):
+        if repDocs is None:
+            repDocs = self.replicatorDB.allDocs(options={'include_docs': True})['rows']
+
+        filteredDocs = self._filterReplicationDocs(repDocs, source, target)
+        if not filteredDocs:
+            return
+        for doc in filteredDocs:
+            self.replicatorDB.queueDelete(doc)
+        return self.replicatorDB.commit()
+
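A short sketch of cancelling replications with CouchMonitor: deleting the _replicator document is the preferred way to stop a replication (as the replicate() docstring above also hints). The URLs below are placeholders:

    monitor = CouchMonitor('http://localhost:5984')

    # remove every non-design document from _replicator (no source/target filter)
    monitor.deleteReplicatorDocs()

    # or remove only the replication between two specific endpoints
    monitor.deleteReplicatorDocs(source='http://localhost:5984/workqueue',
                                 target='http://localhost:5984/workqueue_backup')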
+    def _filterReplicationDocs(self, repDocs, source, target):
+        filteredDocs = []
+        for j in repDocs:
+            if '_design' not in j['id']:
+                if (source is None and target is None) or \
+                        (j['doc']['source'] == source and j['doc']['target'] == target):
+                    doc = {}
+                    doc["_id"] = j['id']
+                    doc["_rev"] = j['value']['rev']
+                    filteredDocs.append(doc)
+        return filteredDocs
+
+    def getActiveTasks(self):
+        """
+        Return all the active tasks in Couch (compaction, replication, indexing, etc)
+        :return: a list with the current active tasks
+
+        For further information:
+        https://docs.couchdb.org/en/3.1.2/api/server/common.html#active-tasks
+        """
+        return self.couchServer.get("/_active_tasks")
+
+    def getSchedulerJobs(self):
+        """
+        Return all replication jobs created either via _replicate or _replicator dbs.
+        It does not include replications that have either completed or failed.
+        :return: a list with the current replication jobs
+
+        For further information:
+        https://docs.couchdb.org/en/3.1.2/api/server/common.html#api-server-scheduler-jobs
+        """
+        resp = []
+        data = self.couchServer.get("/_scheduler/jobs")
+        return data.get("jobs", resp)
+
+    def getSchedulerDocs(self):
+        """
+        Return all replication documents and their states, even if they have completed or
+        failed.
+        :return: a list with the current replication docs
+
+        Replication states can be found at:
+        https://docs.couchdb.org/en/3.1.2/replication/replicator.html#replicator-states
+        For further information:
+        https://docs.couchdb.org/en/3.1.2/api/server/common.html#api-server-scheduler-docs
+        """
+        # NOTE: if there are no docs, this call can give a response like:
+        # {"error":"not_found","reason":"Database does not exist."}
+        resp = []
+        try:
+            data = self.couchServer.get("/_scheduler/docs")
+        except CouchNotFoundError as exc:
+            logging.warning("/_scheduler/docs API returned: %s", getattr(exc, "result", ""))
+            return resp
+        return data.get("docs", resp)
+
+    def checkCouchReplications(self, replicationsList):
+        """
+        Check whether the expected replications exist in CouchDB
+        and also check their status.
+
+        :param replicationsList: a list of dictionaries with the replication
+            document setup.
+        :return: a dictionary with the status of the replications and an
+            error message
+        """
+        activeTasks = self.getActiveTasks()
+        # filter out any task that is not a database replication
+        activeTasks = [task for task in activeTasks if task["type"].lower() == "replication"]
+
+        if len(replicationsList) != len(activeTasks):
+            msg = f"Expected to have {len(replicationsList)} replication tasks, "
+            msg += f"but found {len(activeTasks)} in CouchDB. "
+            msg += f"Current replications are: {activeTasks}"
+            return {'status': 'error', 'error_message': msg}
+
+        resp = self.checkReplicationState()
+        if resp['status'] != 'ok':
+            # then there is a problem, return its status
+            return resp
+
+        # finally, check if replications are being updated in a timely fashion
+        for replTask in activeTasks:
+            if not self.isReplicationOK(replTask):
+                source = sanitizeURL(replTask['source'])['url']
+                target = sanitizeURL(replTask['target'])['url']
+                msg = f"Replication from {source} to {target} is stale and its last "
+                msg += f"update time was at: {replTask.get('updated_on')}"
+                resp['status'] = 'error'
+                resp['error_message'] += msg
+        return resp
+
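checkCouchReplications only compares the number of expected replications against the active replication tasks, then delegates to checkReplicationState and isReplicationOK. A hedged example; the shape of the expected-replication dictionaries is an assumption, since only the list length is actually inspected here:

    monitor = CouchMonitor('http://localhost:5984')
    expectedRepls = [{'source': 'http://localhost:5984/workqueue',
                      'target': 'http://localhost:5984/workqueue_inbox'}]

    result = monitor.checkCouchReplications(expectedRepls)
    if result['status'] != 'ok':
        print("Replication problem: %s" % result['error_message'])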
+    def checkReplicationState(self):
+        """
+        Check the state of the existing replication tasks.
+        NOTE that this can't be done for CouchDB 1.6, since it exposes no
+        replication state (no _scheduler API).
+
+        :return: a dictionary with the status of the replications and an
+            error message
+        """
+        resp = {'status': 'ok', 'error_message': ""}
+        if self.couchVersion == "1.6.1":
+            return resp
+
+        for replDoc in self.getSchedulerDocs():
+            if replDoc['state'].lower() not in ["pending", "running"]:
+                source = sanitizeURL(replDoc['source'])['url']
+                target = sanitizeURL(replDoc['target'])['url']
+                msg = f"Replication from {source} to {target} is in a bad state: {replDoc.get('state')}; "
+                resp['status'] = "error"
+                resp['error_message'] += msg
+        return resp
+
+    def isReplicationOK(self, replInfo):
+        """
+        Ensure that the replication document is up-to-date as a
+        function of the checkpoint interval.
+
+        :param replInfo: dictionary with the replication information
+        :return: True if replication is working fine, otherwise False
+        """
+        maxUpdateInterval = replInfo['checkpoint_interval'] / 1000
+        lastUpdate = replInfo["updated_on"]
+
+        if lastUpdate + maxUpdateInterval > int(time.time()):
+            # then it has been recently updated
+            return True
+        return False
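To make the staleness check concrete: checkpoint_interval is reported by CouchDB in milliseconds, so it is divided by 1000 before being compared against wall-clock seconds. A tiny worked example with invented numbers:

    import time

    replInfo = {'checkpoint_interval': 30000,          # e.g. 30 seconds
                'updated_on': int(time.time()) - 10}   # last checkpoint 10 seconds ago

    maxUpdateInterval = replInfo['checkpoint_interval'] / 1000   # 30.0 seconds
    isOK = replInfo['updated_on'] + maxUpdateInterval > int(time.time())
    print(isOK)   # True: the last update is within one checkpoint interval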