DIRAC 9.0.0a59__py3-none-any.whl → 9.0.0a61__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (26)
  1. DIRAC/AccountingSystem/Client/AccountingCLI.py +0 -140
  2. DIRAC/AccountingSystem/Client/DataStoreClient.py +0 -13
  3. DIRAC/AccountingSystem/Client/Types/BaseAccountingType.py +0 -7
  4. DIRAC/AccountingSystem/ConfigTemplate.cfg +0 -5
  5. DIRAC/AccountingSystem/Service/DataStoreHandler.py +0 -72
  6. DIRAC/Core/DISET/private/BaseClient.py +1 -2
  7. DIRAC/Core/scripts/dirac_apptainer_exec.py +2 -1
  8. DIRAC/DataManagementSystem/Agent/RequestOperations/RemoveFile.py +7 -6
  9. DIRAC/DataManagementSystem/Utilities/DMSHelpers.py +5 -1
  10. DIRAC/FrameworkSystem/Client/ComponentInstaller.py +4 -2
  11. DIRAC/MonitoringSystem/DB/MonitoringDB.py +0 -20
  12. DIRAC/MonitoringSystem/Service/MonitoringHandler.py +0 -33
  13. DIRAC/Resources/Computing/BatchSystems/Condor.py +108 -109
  14. DIRAC/Resources/Computing/HTCondorCEComputingElement.py +33 -15
  15. DIRAC/Resources/Computing/test/Test_HTCondorCEComputingElement.py +67 -49
  16. DIRAC/Resources/Storage/StorageBase.py +4 -2
  17. DIRAC/WorkloadManagementSystem/Client/DownloadInputData.py +3 -2
  18. DIRAC/WorkloadManagementSystem/Client/test/Test_Client_DownloadInputData.py +29 -0
  19. DIRAC/WorkloadManagementSystem/JobWrapper/JobWrapper.py +9 -6
  20. DIRAC/WorkloadManagementSystem/Service/PilotManagerHandler.py +6 -3
  21. {dirac-9.0.0a59.dist-info → dirac-9.0.0a61.dist-info}/METADATA +1 -1
  22. {dirac-9.0.0a59.dist-info → dirac-9.0.0a61.dist-info}/RECORD +26 -26
  23. {dirac-9.0.0a59.dist-info → dirac-9.0.0a61.dist-info}/WHEEL +0 -0
  24. {dirac-9.0.0a59.dist-info → dirac-9.0.0a61.dist-info}/entry_points.txt +0 -0
  25. {dirac-9.0.0a59.dist-info → dirac-9.0.0a61.dist-info}/licenses/LICENSE +0 -0
  26. {dirac-9.0.0a59.dist-info → dirac-9.0.0a61.dist-info}/top_level.txt +0 -0
DIRAC/AccountingSystem/Client/AccountingCLI.py

@@ -57,99 +57,6 @@ class AccountingCLI(CLI):
         traceback.print_tb(sys.exc_info()[2])
         print("________________________\n")

-    def do_registerType(self, args):
-        """
-        Registers a new accounting type
-        Usage : registerType <typeName>
-        <DIRACRoot>/DIRAC/AccountingSystem/Client/Types/<typeName>
-        should exist and inherit the base type
-        """
-        try:
-            argList = args.split()
-            if argList:
-                typeName = argList[0].strip()
-            else:
-                gLogger.error("No type name specified")
-                return
-            # Try to import the type
-            result = self.objectLoader.loadObject(f"DIRAC.AccountingSystem.Client.Types.{typeName}")
-            if not result["OK"]:
-                return result
-            typeClass = result["Value"]
-
-            gLogger.info(f"Loaded type {typeClass.__name__}")
-            typeDef = typeClass().getDefinition()
-            acClient = DataStoreClient()
-            retVal = acClient.registerType(*typeDef)
-            if retVal["OK"]:
-                gLogger.info("Type registered successfully")
-            else:
-                gLogger.error(f"Error: {retVal['Message']}")
-        except Exception:
-            self.showTraceback()
-
-    def do_resetBucketLength(self, args):
-        """
-        Set the bucket Length. Will trigger a recalculation of buckets. Can take a while.
-        Usage : resetBucketLength <typeName>
-        <DIRACRoot>/DIRAC/AccountingSystem/Client/Types/<typeName>
-        should exist and inherit the base type
-        """
-        try:
-            argList = args.split()
-            if argList:
-                typeName = argList[0].strip()
-            else:
-                gLogger.error("No type name specified")
-                return
-
-            # Try to import the type
-            result = self.objectLoader.loadObject(f"DIRAC.AccountingSystem.Client.Types.{typeName}")
-            if not result["OK"]:
-                return result
-            typeClass = result["Value"]
-            gLogger.info(f"Loaded type {typeClass.__name__}")
-            typeDef = typeClass().getDefinition()
-            acClient = DataStoreClient()
-            retVal = acClient.setBucketsLength(typeDef[0], typeDef[3])
-            if retVal["OK"]:
-                gLogger.info("Type registered successfully")
-            else:
-                gLogger.error(f"Error: {retVal['Message']}")
-        except Exception:
-            self.showTraceback()
-
-    def do_regenerateBuckets(self, args):
-        """
-        Regenerate buckets for type. Can take a while.
-        Usage : regenerateBuckets <typeName>
-        <DIRACRoot>/DIRAC/AccountingSystem/Client/Types/<typeName>
-        should exist and inherit the base type
-        """
-        try:
-            argList = args.split()
-            if argList:
-                typeName = argList[0].strip()
-            else:
-                gLogger.error("No type name specified")
-                return
-
-            # Try to import the type
-            result = self.objectLoader.loadObject(f"DIRAC.AccountingSystem.Client.Types.{typeName}")
-            if not result["OK"]:
-                return result
-            typeClass = result["Value"]
-            gLogger.info(f"Loaded type {typeClass.__name__}")
-            typeDef = typeClass().getDefinition()
-            acClient = DataStoreClient()
-            retVal = acClient.regenerateBuckets(typeDef[0])
-            if retVal["OK"]:
-                gLogger.info("Buckets recalculated!")
-            else:
-                gLogger.error(f"Error: {retVal['Message']}")
-        except Exception:
-            self.showTraceback()
-
     def do_showRegisteredTypes(self, args):
         """
         Get a list of registered types

@@ -170,50 +77,3 @@ class AccountingCLI(CLI):
                 print(" Value fields:\n %s" % "\n ".join(typeList[2]))
         except Exception:
             self.showTraceback()
-
-    def do_deleteType(self, args):
-        """
-        Delete a registered accounting type.
-        Usage : deleteType <typeName>
-        WARN! It will delete all data associated to that type! VERY DANGEROUS!
-        If you screw it, you'll discover a new dimension of pain and doom! :)
-        """
-        try:
-            argList = args.split()
-            if argList:
-                typeName = argList[0].strip()
-            else:
-                gLogger.error("No type name specified")
-                return
-
-            choice = input(
-                f"Are you completely sure you want to delete type {typeName} and all it's data? yes/no [no]: "
-            )
-            choice = choice.lower()
-            if choice not in ("yes", "y"):
-                print("Delete aborted")
-                return
-
-            acClient = DataStoreClient()
-            retVal = acClient.deleteType(typeName)
-            if not retVal["OK"]:
-                gLogger.error(f"Error: {retVal['Message']}")
-                return
-            print("Hope you meant it, because it's done")
-        except Exception:
-            self.showTraceback()
-
-    def do_compactBuckets(self, args):
-        """
-        Compact buckets table
-        Usage : compactBuckets
-        """
-        try:
-            acClient = DataStoreClient()
-            retVal = acClient.compactDB()
-            if not retVal["OK"]:
-                gLogger.error(f"Error: {retVal['Message']}")
-                return
-            gLogger.info("Done")
-        except Exception:
-            self.showTraceback()
DIRAC/AccountingSystem/Client/DataStoreClient.py

@@ -122,19 +122,6 @@ class DataStoreClient(Client):

         return S_OK()

-    def remove(self, register):
-        """
-        Remove a Register from the Accounting DataStore
-        """
-        if not self.__checkBaseType(register.__class__):
-            return S_ERROR("register is not a valid type (has to inherit from BaseAccountingType")
-        retVal = register.checkValues()
-        if not retVal["OK"]:
-            return retVal
-        if gConfig.getValue("/LocalSite/DisableAccounting", False):
-            return S_OK()
-        return self._getRPC().remove(*register.getValues())
-

 def _sendToFailover(rpcStub):
     """Create a ForwardDISET operation for failover"""
DIRAC/AccountingSystem/Client/Types/BaseAccountingType.py

@@ -161,13 +161,6 @@ class BaseAccountingType:
             cD[self.fieldsList[iPos]] = self.valuesList[iPos]
         return cD

-    def registerToServer(self):
-        """
-        Register type in server
-        """
-        rpcClient = Client(url="Accounting/DataStore")
-        return rpcClient.registerType(*self.getDefinition())
-
     def commit(self):
         """
         Commit register to server
DIRAC/AccountingSystem/ConfigTemplate.cfg

@@ -9,11 +9,6 @@ Services
   Authorization
   {
     Default = authenticated
-    compactDB = ServiceAdministrator
-    deleteType = ServiceAdministrator
-    registerType = ServiceAdministrator
-    setBucketsLength = ServiceAdministrator
-    regenerateBuckets = ServiceAdministrator
   }
 }
 ##END
DIRAC/AccountingSystem/Service/DataStoreHandler.py

@@ -14,7 +14,6 @@ import datetime
 from DIRAC import S_ERROR, S_OK
 from DIRAC.AccountingSystem.DB.MultiAccountingDB import MultiAccountingDB
 from DIRAC.ConfigurationSystem.Client import PathFinder
-from DIRAC.Core.Base.Client import Client
 from DIRAC.Core.DISET.RequestHandler import RequestHandler, getServiceOption
 from DIRAC.Core.Utilities import TimeUtilities
 from DIRAC.Core.Utilities.ThreadScheduler import gThreadScheduler

@@ -39,30 +38,6 @@ class DataStoreHandler(RequestHandler):
         gThreadScheduler.addPeriodicTask(60, cls.__acDB.loadPendingRecords)
         return S_OK()

-    types_registerType = [str, list, list, list]
-
-    def export_registerType(self, typeName, definitionKeyFields, definitionAccountingFields, bucketsLength):
-        """
-        Register a new type. (Only for all powerful admins)
-        """
-        return self.__acDB.registerType(typeName, definitionKeyFields, definitionAccountingFields, bucketsLength)
-
-    types_setBucketsLength = [str, list]
-
-    def export_setBucketsLength(self, typeName, bucketsLength):
-        """
-        Change the buckets Length. (Only for all powerful admins)
-        """
-        return self.__acDB.changeBucketsLength(typeName, bucketsLength)
-
-    types_regenerateBuckets = [str]
-
-    def export_regenerateBuckets(self, typeName):
-        """
-        Recalculate buckets. (Only for all powerful admins)
-        """
-        return self.__acDB.regenerateBuckets(typeName)
-
     types_getRegisteredTypes = []

     def export_getRegisteredTypes(self):

@@ -106,51 +81,4 @@ class DataStoreHandler(RequestHandler):
             records.append((entry[0], startTime, endTime, entry[3]))
         return self.__acDB.insertRecordBundleThroughQueue(records)

-    types_compactDB = []
-
-    def export_compactDB(self):
-        """
-        Compact the db by grouping buckets
-        """
-        # if we are running workers (not only one service) we can redirect the request to the master
-        # For more information please read the Administrative guide Accounting part!
-        # ADVICE: If you want to trigger the bucketing, please make sure the bucketing is not running!!!!
-        if self.runBucketing:
-            return self.__acDB.compactBuckets()
-
-        return Client(url="Accounting/DataStoreMaster").compactDB()
-
     types_remove = [str, datetime.datetime, datetime.datetime, list]
-
-    def export_remove(self, typeName, startTime, endTime, valuesList):
-        """
-        Remove a record for a type
-        """
-        startTime = int(TimeUtilities.toEpoch(startTime))
-        endTime = int(TimeUtilities.toEpoch(endTime))
-        return self.__acDB.deleteRecord(typeName, startTime, endTime, valuesList)
-
-    types_removeRegisters = [list]
-
-    def export_removeRegisters(self, entriesList):
-        """
-        Remove a record for a type
-        """
-        expectedTypes = [str, datetime.datetime, datetime.datetime, list]
-        for entry in entriesList:
-            if len(entry) != 4:
-                return S_ERROR("Invalid records")
-            for i, en in enumerate(entry):
-                if not isinstance(en, expectedTypes[i]):
-                    return S_ERROR(f"{i} field in the records should be {expectedTypes[i]}")
-        ok = 0
-        for entry in entriesList:
-            startTime = int(TimeUtilities.toEpoch(entry[1]))
-            endTime = int(TimeUtilities.toEpoch(entry[2]))
-            record = entry[3]
-            result = self.__acDB.deleteRecord(entry[0], startTime, endTime, record)
-            if not result["OK"]:
-                return S_OK(ok)
-            ok += 1
-
-        return S_OK(ok)
DIRAC/Core/DISET/private/BaseClient.py

@@ -323,7 +323,7 @@ class BaseClient:
             pass

         # We randomize the list, and add at the end the failover URLs (System/FailoverURLs/Component)
-        urlsList = List.randomize(List.fromChar(urls, ",")) + failoverUrls
+        urlsList = List.fromChar(urls, ",") + failoverUrls
         self.__nbOfUrls = len(urlsList)
         self.__nbOfRetry = (
             2 if self.__nbOfUrls > 2 else 3
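The only functional change in this hunk is that the candidate URL list is no longer shuffled before use: clients now try the configured service URLs in order, with the failover URLs still appended last (note that the comment about randomizing is kept by this hunk even though the shuffle is gone). A minimal before/after sketch, assuming DIRAC.Core.Utilities.List is importable; the URLs are placeholders:

    from DIRAC.Core.Utilities import List

    urls = "dips://srv1:9135/System/Service, dips://srv2:9135/System/Service"
    failoverUrls = ["dips://failover:9135/System/Service"]

    # 9.0.0a59: candidate URLs were shuffled, spreading load randomly across servers
    old_order = List.randomize(List.fromChar(urls, ",")) + failoverUrls

    # 9.0.0a61: configuration order is preserved; failover URLs still come last
    new_order = List.fromChar(urls, ",") + failoverUrls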
@@ -445,7 +445,6 @@ and this is thread {cThID}
             return self.__initStatus
         if self.__enableThreadCheck:
             self.__checkThreadID()
-
         gLogger.debug(f"Trying to connect to: {self.serviceURL}")
         try:
             # Calls the transport method of the apropriate protocol.
DIRAC/Core/scripts/dirac_apptainer_exec.py

@@ -100,7 +100,8 @@ def main():
         gLogger.error(result["Message"])
         DIRAC.exit(1)
     if result["Value"][0] != 0:
-        gLogger.error(result["Value"][2])
+        gLogger.error("Apptainer command failed with exit code", result["Value"][0])
+        gLogger.error("Command output:", result["Value"])
         DIRAC.exit(2)
     gLogger.notice(result["Value"][1])
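Based on the indices used in this hunk, result["Value"] holds the (exitCode, stdout, stderr) tuple of the wrapped call: a59 logged only the stderr element on failure, while a61 logs the exit code plus the whole tuple, so stdout is no longer dropped. A small sketch with made-up values; the tuple layout is inferred from the indices above, not taken from elsewhere:

    result = {"OK": True, "Value": (2, "", "FATAL: container could not be started")}

    if result["Value"][0] != 0:
        print("Apptainer command failed with exit code", result["Value"][0])
        print("Command output:", result["Value"])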
DIRAC/DataManagementSystem/Agent/RequestOperations/RemoveFile.py

@@ -4,17 +4,17 @@
 # Date: 2013/03/25 07:44:19
 ########################################################################

-""" :mod: RemoveFile
+""":mod: RemoveFile

-    ================
+================

-    .. module: RemoveFile
+.. module: RemoveFile

-    :synopsis: removeFile operation handler
+:synopsis: removeFile operation handler

-    .. moduleauthor:: Krzysztof.Ciba@NOSPAMgmail.com
+.. moduleauthor:: Krzysztof.Ciba@NOSPAMgmail.com

-    removeFile operation handler
+removeFile operation handler
 """
 # #
 # @file RemoveFile.py
@@ -132,6 +132,7 @@ class RemoveFile(DMSRequestOperationsBase):
             self.rmsMonitoringReporter.addRecord(
                 self.createRMSRecord("Successful", len(toRemoveDict) - len(bulkRemoval["Value"]))
             )
+            toRemoveDict = bulkRemoval["Value"]

         # # 2nd step - single file removal
         for lfn, opFile in toRemoveDict.items():
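From the surrounding code, bulkRemoval["Value"] is the subset of toRemoveDict that the bulk removal could not handle (the "Successful" count is the difference of the two lengths), so the added assignment makes the per-file second step retry only the leftovers instead of every LFN. A sketch with placeholder data; the names follow the hunk above, the dictionaries are made up:

    toRemoveDict = {"/lfn/a": "opFileA", "/lfn/b": "opFileB", "/lfn/c": "opFileC"}
    bulkRemoval = {"OK": True, "Value": {"/lfn/c": "opFileC"}}  # not removed in bulk

    successful = len(toRemoveDict) - len(bulkRemoval["Value"])  # 2 reported as "Successful"

    # New in a61: only the failed files go through the single-file removal loop
    toRemoveDict = bulkRemoval["Value"]
    for lfn, opFile in toRemoveDict.items():
        print("retrying single-file removal for", lfn)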
DIRAC/DataManagementSystem/Utilities/DMSHelpers.py

@@ -1,7 +1,8 @@
 """
-This module contains helper methods for accessing operational attributes or parameters of DMS objects
+This module contains helper methods for accessing operational attributes or parameters of DMS objects

 """
+
 from collections import defaultdict
 from DIRAC import gConfig, gLogger, S_OK, S_ERROR
 from DIRAC.ConfigurationSystem.Client.Helpers.Path import cfgPath
@@ -17,6 +18,9 @@ sLog = gLogger.getSubLogger(__name__)
 def resolveSEGroup(seGroupList, allSEs=None):
     """
     Resolves recursively a (list of) SEs that can be groupSEs
+    For modules, JobWrapper or whatever runs at a given site,
+    prefer using :py:func:`~DIRAC.DataManagementSystem.Utilities.ResolveSE.getDestinationSEList`
+

     :param seGroupList: list of SEs to resolve or comma-separated SEs
     :type seGroupList: list or string
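The new docstring lines steer site-local code (for example the JobWrapper) towards getDestinationSEList in DIRAC.DataManagementSystem.Utilities.ResolveSE rather than this helper. For reference, a hypothetical call of resolveSEGroup itself, using only the signature shown in the hunk; the SE group names are made up:

    from DIRAC.DataManagementSystem.Utilities.DMSHelpers import resolveSEGroup

    # Accepts a list of SEs or a comma-separated string and expands SE groups recursively
    ses = resolveSEGroup("Tier1-DST, CERN-USER")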
DIRAC/FrameworkSystem/Client/ComponentInstaller.py

@@ -60,7 +60,6 @@ from collections import defaultdict

 import importlib_metadata as metadata
 import importlib_resources
-import MySQLdb
 from diraccfg import CFG
 from prompt_toolkit import prompt

@@ -96,7 +95,6 @@ from DIRAC.Core.Utilities.Extensions import (
     findServices,
 )
 from DIRAC.Core.Utilities.File import mkDir, mkLink
-from DIRAC.Core.Utilities.MySQL import MySQL
 from DIRAC.Core.Utilities.PrettyPrint import printTable
 from DIRAC.Core.Utilities.ReturnValues import S_ERROR, S_OK
 from DIRAC.Core.Utilities.Subprocess import systemCall

@@ -2055,6 +2053,8 @@ class ComponentInstaller:
         """
         Install requested DB in MySQL server
         """
+        import MySQLdb
+
         dbName = MySQLdb.escape_string(dbName.encode()).decode()
         if not self.mysqlRootPwd:
             rootPwdPath = cfgInstallPath("Database", "RootPwd")
@@ -2202,6 +2202,8 @@ class ComponentInstaller:
         """
         Execute MySQL Command
         """
+        from DIRAC.Core.Utilities.MySQL import MySQL
+
         if not self.mysqlRootPwd:
             return S_ERROR("MySQL root password is not defined")
         if dbName not in self.db:
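Taken together, the four ComponentInstaller hunks turn MySQLdb and DIRAC.Core.Utilities.MySQL into deferred imports: the modules are only loaded inside the methods that install a database or execute MySQL commands, so importing ComponentInstaller itself no longer requires the MySQL bindings to be present. A minimal sketch of the pattern; the class and method names below are illustrative, not taken from the diff:

    class Installer:
        """Illustrative only: mirrors the deferred-import pattern used above."""

        def installDB(self, dbName: str) -> str:
            # The MySQL binding is imported only when this method runs, so merely
            # importing the module that defines Installer never needs MySQLdb.
            import MySQLdb

            return MySQLdb.escape_string(dbName.encode()).decode()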
DIRAC/MonitoringSystem/DB/MonitoringDB.py

@@ -447,26 +447,6 @@ class MonitoringDB(ElasticDB):
             records.append({paramName: getattr(resObj["_source"], paramName) for paramName in paramNames})
         return S_OK(records)

-    def getLastDayData(self, typeName, condDict):
-        """
-        It returns the last day data for a given monitoring type.
-
-        :returns: for example
-
-        .. code-block:: python
-
-          {'sort': [{'timestamp': {'order': 'desc'}}],
-           'query': {'bool': {'must': [{'match': {'host': 'dzmathe.cern.ch'}},
-                                       {'match': {'component': 'Bookkeeping_BookkeepingManager'}}]}}}
-
-        :param str typeName: name of the monitoring type
-        :param dict condDict: conditions for the query
-
-        * key -> name of the field
-        * value -> list of possible values
-        """
-        return self.__getRawData(typeName, condDict)
-
     def getLimitedData(self, typeName, condDict, size=10):
         """
         Returns a list of records for a given selection.
DIRAC/MonitoringSystem/Service/MonitoringHandler.py

@@ -244,24 +244,6 @@ class MonitoringHandlerMixin:
         reportRequest["generatePlot"] = False
         return reporter.generate(reportRequest)

-    types_addMonitoringRecords = [str, list]
-
-    def export_addMonitoringRecords(self, monitoringtype, data):
-        """
-        Bulk insert data directly to the given monitoring type.
-
-        :param str monitoringtype: monitoring type name
-        :param list data: list of documents
-        :returns: S_OK or S_ERROR
-        """
-
-        retVal = self.__db.getIndexName(monitoringtype)
-        if not retVal["OK"]:
-            return retVal
-        prefix = retVal["Value"]
-        gLogger.debug("addMonitoringRecords:", prefix)
-        return self.__db.bulk_index(prefix, data)
-
     types_addRecords = [str, str, list]

     def export_addRecords(self, indexname, monitoringType, data):
@@ -290,21 +272,6 @@ class MonitoringHandlerMixin:
         gLogger.debug("delete index:", indexName)
         return self.__db.deleteIndex(indexName)

-    types_getLastDayData = [str, dict]
-
-    def export_getLastDayData(self, typeName, condDict):
-        """
-        It returns the data from the last day index. Note: we create daily indexes.
-
-        :param str typeName: name of the monitoring type
-        :param dict condDict: conditions for the query
-
-        * key -> name of the field
-        * value -> list of possible values
-        """
-
-        return self.__db.getLastDayData(typeName, condDict)
-
     types_getLimitedDat = [str, dict, int]

     def export_getLimitedData(self, typeName, condDict, size):