DIRAC 9.0.8__py3-none-any.whl → 9.0.10__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
DIRAC/Core/Utilities/ElasticSearchDB.py

@@ -82,7 +82,6 @@ def generateDocs(data, withTimeStamp=True):


  class ElasticSearchDB:
-
  """
  .. class:: ElasticSearchDB

@@ -506,7 +505,7 @@ class ElasticSearchDB:
  indexName = self.generateFullIndexName(indexPrefix, period)
  else:
  indexName = indexPrefix
- sLog.debug(f"Bulk indexing into {indexName} of {data}")
+ sLog.debug(f"Bulk indexing into {indexName} of {len(data)}")

  res = self.existingIndex(indexName)
  if not res["OK"]:
DIRAC/Core/Utilities/Subprocess.py

@@ -1,7 +1,7 @@
  """
  DIRAC Wrapper to execute python and system commands with a wrapper, that might
  set a timeout.
- 3 FUNCTIONS are provided:
+ 3 functions are provided:

  - shellCall( iTimeOut, cmdSeq, callbackFunction = None, env = None ):
  it uses subprocess.Popen class with "shell = True".
@@ -26,6 +26,7 @@ set a timeout.
  should be used to wrap third party python functions

  """
+
  import os
  import selectors
  import signal
@@ -161,7 +162,6 @@ class Subprocess:

  self.child = None
  self.childPID = 0
- self.childKilled = False
  self.callback = None
  self.bufferList = []
  self.cmdSeq = []
@@ -193,7 +193,7 @@ class Subprocess:
  f"First and last data in buffer: \n{dataString[:100]} \n....\n {dataString[-100:]} ",
  )
  retDict = S_ERROR(
- "Reached maximum allowed length (%d bytes) " "for called function return value" % self.bufferLimit
+ "Reached maximum allowed length (%d bytes) for called function return value" % self.bufferLimit
  )
  retDict["Value"] = dataString
  return retDict
@@ -241,60 +241,71 @@ class Subprocess:
  events = sel.select(timeout=timeout or self.timeout or None)
  return [key.fileobj for key, event in events if event & selectors.EVENT_READ]

- def __killPid(self, pid, sig=9):
- """send signal :sig: to process :pid:
-
- :param int pid: process id
- :param int sig: signal to send, default 9 (SIGKILL)
- """
+ def __terminateProcess(self, process):
+ """Tries to terminate a process with SIGTERM. Returns a (gone, alive) tuple"""
+ self.log.verbose(f"Sending SIGTERM signal to PID {process.pid}")
  try:
- os.kill(pid, sig)
- except Exception as x:
- if str(x) != "[Errno 3] No such process":
- self.log.exception("Exception while killing timed out process")
- raise x
-
- def __poll(self, pid):
- """wait for :pid:"""
+ process.terminate()
+ except psutil.NoSuchProcess:
+ return ([], [])
+ return psutil.wait_procs([process], timeout=60)
+
+ def __poll(self, process):
+ """Non-blocking check of whether process `pid` is still alive.
+ Returns:
+ - (0, 0) if process is still running (like os.waitpid(pid, os.WNOHANG))
+ - (pid, exitcode) if process has terminated
+ - None if process info cannot be retrieved
+ """
  try:
- return os.waitpid(pid, os.WNOHANG)
- except os.error:
- if self.childKilled:
- return False
+ exitcode = process.wait(timeout=0)
+ return (process.pid, exitcode) # exited
+ except psutil.TimeoutExpired:
+ return (0, 0) # still running
+ except psutil.NoSuchProcess:
  return None

  def killChild(self, recursive=True):
- """kill child process
-
- :param boolean recursive: flag to kill all descendants
+ """Kills a process tree (including children) with signal SIGTERM. If that fails, escalate to SIGKILL
+ returns (gone, alive) tuple.
  """
- pgid = os.getpgid(self.childPID)
- if pgid != os.getpgrp():
- try:
- # Child is in its own group: kill the group
- os.killpg(pgid, signal.SIGTERM)
- except OSError:
- # Process is already dead
- pass
- else:
- # No separate group: walk the tree
- parent = psutil.Process(self.childPID)
- procs = parent.children(recursive=recursive)
- procs.append(parent)
- for p in procs:
+
+ self.log.info(f"Killing childPID {self.childPID}")
+
+ gone, alive = [], []
+ try:
+ child_process = psutil.Process(self.childPID)
+ except psutil.NoSuchProcess:
+ self.log.warn(f"Child PID {self.childPID} no longer exists")
+ return (gone, alive)
+
+ if recursive:
+ # grandchildren
+ children = child_process.children(recursive=True)
+ self.log.info(f"Sending kill signal to {len(children)} children PIDs")
+ for p in children:
  try:
  p.terminate()
  except psutil.NoSuchProcess:
- pass
- _gone, alive = psutil.wait_procs(procs, timeout=10)
- # Escalate any survivors
+ continue
+ g, a = psutil.wait_procs(children, timeout=60)
+ gone.extend(g)
+ alive.extend(a)
+
+ # now killing the child_process
+ g, a = self.__terminateProcess(child_process)
+ gone.extend(g)
+ alive.extend(a)
+
+ # if there's something still alive, use SIGKILL
+ if alive:
  for p in alive:
  try:
  p.kill()
  except psutil.NoSuchProcess:
  pass

- self.childKilled = True
+ return psutil.wait_procs(alive, timeout=60)

  def pythonCall(self, function, *stArgs, **stKeyArgs):
  """call python function :function: with :stArgs: and :stKeyArgs:"""
@@ -309,8 +320,6 @@ class Subprocess:
  if pid == 0:
  os.close(readFD)
  self.__executePythonFunction(function, writeFD, *stArgs, **stKeyArgs)
- # FIXME: the close it is done at __executePythonFunction, do we need it here?
- os.close(writeFD)
  else:
  os.close(writeFD)
  readSeq = self.__selectFD([readFD])
@@ -319,14 +328,13 @@ class Subprocess:
  try:
  if len(readSeq) == 0:
  self.log.debug("Timeout limit reached for pythonCall", function.__name__)
- self.__killPid(pid)
-
- # HACK to avoid python bug
- # self.wait()
- retries = 10000
- while os.waitpid(pid, 0) == -1 and retries > 0:
- time.sleep(0.001)
- retries -= 1
+ gone, alive = self.__terminateProcess(psutil.Process(pid))
+ if alive:
+ for p in alive:
+ try:
+ p.kill()
+ except psutil.NoSuchProcess:
+ continue

  return S_ERROR('%d seconds timeout for "%s" call' % (self.timeout, function.__name__))
  elif readSeq[0] == readFD:
@@ -400,7 +408,7 @@ class Subprocess:
  if len(dataString) + baseLength > self.bufferLimit:
  self.log.error("Maximum output buffer length reached")
  retDict = S_ERROR(
- "Reached maximum allowed length (%d bytes) for called " "function return value" % self.bufferLimit
+ "Reached maximum allowed length (%d bytes) for called function return value" % self.bufferLimit
  )
  retDict["Value"] = dataString
  return retDict
@@ -446,6 +454,7 @@ class Subprocess:
  start_new_session=start_new_session,
  )
  self.childPID = self.child.pid
+ child_process = psutil.Process(self.childPID)
  except OSError as v:
  retDict = S_ERROR(repr(v))
  retDict["Value"] = (-1, "", str(v))
@@ -464,9 +473,9 @@ class Subprocess:
  self.bufferList = [["", 0], ["", 0]]
  initialTime = time.time()

- exitStatus = self.__poll(self.child.pid)
+ exitStatus = self.__poll(child_process)

- while (0, 0) == exitStatus or exitStatus is None:
+ while (0, 0) == exitStatus: # This means that the process is still alive
  retDict = self.__readFromCommand()
  if not retDict["OK"]:
  return retDict
@@ -478,14 +487,14 @@ class Subprocess:
  1, "Timeout (%d seconds) for '%s' call" % (self.timeout, cmdSeq)
  )
  time.sleep(0.01)
- exitStatus = self.__poll(self.child.pid)
+ exitStatus = self.__poll(child_process)

  self.__readFromCommand()

  if exitStatus:
  exitStatus = exitStatus[1]

- if exitStatus >= 256:
+ if exitStatus and exitStatus >= 256:
  exitStatus = int(exitStatus / 256)
  return S_OK((exitStatus, self.bufferList[0][0], self.bufferList[1][0]))
  finally:
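
Note: the new __poll contract mirrors psutil's non-blocking wait: a zero timeout either returns the exit code immediately or raises TimeoutExpired while the child is still running. A minimal sketch of that polling idiom (illustrative only, not the DIRAC wrapper):

    import subprocess
    import psutil

    child = subprocess.Popen(["sleep", "2"])
    proc = psutil.Process(child.pid)

    try:
        exitcode = proc.wait(timeout=0)  # returns immediately if the child exited
        print("finished with", exitcode)
    except psutil.TimeoutExpired:
        print("still running")           # equivalent of the (0, 0) sentinel
    except psutil.NoSuchProcess:
        print("no such process")         # already reaped elsewhere
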
DIRAC/Core/Utilities/test/Test_Subprocess.py

@@ -3,23 +3,25 @@
  # Date: 2012/12/11 18:04:25
  ########################################################################

- """ :mod: SubprocessTests
- =======================
+ """:mod: SubprocessTests
+ =======================

- .. module: SubprocessTests
- :synopsis: unittest for Subprocess module
- .. moduleauthor:: Krzysztof.Ciba@NOSPAMgmail.com
+ .. module: SubprocessTests
+ :synopsis: unittest for Subprocess module
+ .. moduleauthor:: Krzysztof.Ciba@NOSPAMgmail.com

- unittest for Subprocess module
+ unittest for Subprocess module
  """
- import time
+
  import platform
+ import time
  from os.path import dirname, join
  from subprocess import Popen

+ import psutil
  import pytest

- from DIRAC.Core.Utilities.Subprocess import systemCall, shellCall, pythonCall, getChildrenPIDs, Subprocess
+ from DIRAC.Core.Utilities.Subprocess import Subprocess, getChildrenPIDs, pythonCall, shellCall, systemCall

  # Mark this entire module as slow
  pytestmark = pytest.mark.slow
@@ -72,3 +74,51 @@ def test_decodingCommandOutput():
  retVal = sp.systemCall(r"""python -c 'import os; os.fdopen(2, "wb").write(b"\xdf")'""", shell=True)
  assert retVal["OK"]
  assert retVal["Value"] == (0, "", "\ufffd")
+
+
+ @pytest.fixture
+ def subprocess_instance():
+ """Provides a Subprocess instance for testing."""
+ subp = Subprocess()
+ return subp
+
+
+ @pytest.fixture
+ def dummy_child():
+ """Spawn a dummy process tree: parent -> child."""
+ # Start a shell that sleeps, with a subprocess child
+ parent = Popen(["bash", "-c", "sleep 10 & wait"])
+ time.sleep(0.2) # give it a moment to start
+ yield parent
+ # Ensure cleanup
+ try:
+ parent.terminate()
+ parent.wait(timeout=1)
+ except Exception:
+ pass
+
+
+ def test_kill_child_process_tree(subprocess_instance, dummy_child):
+ """Test that killChild kills both parent and its children."""
+ subprocess_instance.childPID = dummy_child.pid
+ parent_proc = psutil.Process(subprocess_instance.childPID)
+
+ # Sanity check: parent should exist
+ assert parent_proc.is_running()
+
+ # It should have at least one sleeping child
+ children = parent_proc.children(recursive=True)
+ assert children, "Expected dummy process to have at least one child"
+
+ # Kill the tree
+ gone, alive = subprocess_instance.killChild(recursive=True)
+
+ # Verify the parent and children are terminated
+ for p in gone:
+ assert not p.is_running(), f"Process {p.pid} still alive"
+ for p in alive:
+ assert not p.is_running(), f"Process {p.pid} still alive"
+
+ # Verify parent is gone
+ with pytest.raises(psutil.NoSuchProcess):
+ psutil.Process(subprocess_instance.childPID)
DIRAC/DataManagementSystem/Client/DataManager.py

@@ -10,11 +10,11 @@ This module consists of DataManager and related classes.
  """

  # # imports
- from datetime import datetime, timedelta
+ import errno
  import fnmatch
  import os
  import time
- import errno
+ from datetime import datetime, timedelta

  # # from DIRAC
  import DIRAC
@@ -25,13 +25,13 @@ from DIRAC.Core.Utilities.File import makeGuid, getSize
  from DIRAC.Core.Utilities.List import randomize, breakListIntoChunks
  from DIRAC.Core.Utilities.ReturnValues import returnSingleResult
  from DIRAC.Core.Security.ProxyInfo import getProxyInfo
+ from DIRAC.Core.Security.ProxyInfo import getVOfromProxyGroup
  from DIRAC.ConfigurationSystem.Client.Helpers.Operations import Operations
  from DIRAC.DataManagementSystem.Client import MAX_FILENAME_LENGTH
- from DIRAC.MonitoringSystem.Client.DataOperationSender import DataOperationSender
  from DIRAC.DataManagementSystem.Utilities.DMSHelpers import DMSHelpers
+ from DIRAC.MonitoringSystem.Client.DataOperationSender import DataOperationSender
  from DIRAC.Resources.Catalog.FileCatalog import FileCatalog
  from DIRAC.Resources.Storage.StorageElement import StorageElement
- from DIRAC.ResourceStatusSystem.Client.ResourceStatus import ResourceStatus


  # # RSCID
@@ -89,7 +89,7 @@ class DataManager:
  :param vo: the VO for which the DataManager is created, get VO from the current proxy if not specified
  """
  self.log = gLogger.getSubLogger(self.__class__.__name__)
- self.voName = vo
+ self.voName = vo if vo else getVOfromProxyGroup().get("Value", None)

  if catalogs is None:
  catalogs = []
@@ -97,10 +97,9 @@

  self.fileCatalog = FileCatalog(catalogs=catalogsToUse, vo=self.voName)
  self.accountingClient = None
- self.resourceStatus = ResourceStatus()
  self.ignoreMissingInFC = Operations(vo=self.voName).getValue("DataManagement/IgnoreMissingInFC", False)
  self.useCatalogPFN = Operations(vo=self.voName).getValue("DataManagement/UseCatalogPFN", True)
- self.dmsHelper = DMSHelpers(vo=vo)
+ self.dmsHelper = DMSHelpers(vo=self.voName)
  self.registrationProtocol = self.dmsHelper.getRegistrationProtocols()
  self.thirdPartyProtocols = self.dmsHelper.getThirdPartyProtocols()
  self.dataOpSender = DataOperationSender()
DIRAC/Interfaces/Utilities/DConfigCache.py

@@ -9,6 +9,7 @@ from DIRAC import gLogger
  from DIRAC.Core.Base.Script import Script
  from DIRAC.Core.Utilities.File import secureOpenForWrite
  from DIRAC.ConfigurationSystem.Client.ConfigurationData import gConfigurationData
+ from DIRAC.ConfigurationSystem.Client.Helpers.Registry import reset_all_caches


  class ConfigCache:
@@ -69,5 +70,6 @@ class ConfigCache:
  try:
  with open(self.configCacheName, "rb") as fh:
  gConfigurationData.mergedCFG = pickle.load(fh)
+ reset_all_caches()
  except:
  gLogger.error("Cache corrupt or unreadable")
DIRAC/TransformationSystem/Agent/InputDataAgent.py

@@ -74,7 +74,10 @@ class InputDataAgent(AgentModule):
  """Main execution method"""

  # Get all the transformations
- result = self.transClient.getTransformations({"Status": "Active", "Type": self.transformationTypes})
+ result = self.transClient.getTransformations(
+ {"Status": "Active", "Type": self.transformationTypes},
+ columns=["TransformationID", "AuthorDN", "AuthorGroup"],
+ )
  if not result["OK"]:
  self.log.error("InputDataAgent.execute: Failed to get transformations.", result["Message"])
  return S_OK()
DIRAC/TransformationSystem/Agent/MCExtensionAgent.py

@@ -1,4 +1,4 @@
- """ Agent to extend the number of tasks given the Transformation definition
+ """Agent to extend the number of tasks given the Transformation definition

  The following options can be set for the MCExtensionAgent.

@@ -8,6 +8,7 @@ The following options can be set for the MCExtensionAgent.
  :dedent: 2
  :caption: MCExtensionAgent options
  """
+
  from DIRAC import S_OK, gLogger
  from DIRAC.Core.Base.AgentModule import AgentModule
  from DIRAC.ConfigurationSystem.Client.Helpers.Operations import Operations
@@ -54,7 +55,9 @@ class MCExtensionAgent(AgentModule):
  return S_OK("Disabled via CS flag")

  # Obtain the transformations in Cleaning status and remove any mention of the jobs/files
- res = self.transClient.getTransformations({"Status": "Active", "Type": self.transformationTypes})
+ res = self.transClient.getTransformations(
+ {"Status": "Active", "Type": self.transformationTypes}, columns=["TransformationID", "MaxNumberOfTasks"]
+ )
  if res["OK"]:
  for transDict in res["Value"]:
  transID = transDict["TransformationID"]
DIRAC/TransformationSystem/Agent/TaskManagerAgentBase.py

@@ -231,7 +231,9 @@ class TaskManagerAgentBase(AgentModule, TransformationAgentsUtilities):
  selectCond["Type"] = transType
  if agentType:
  selectCond["AgentType"] = agentType
- res = self.transClient.getTransformations(condDict=selectCond)
+ res = self.transClient.getTransformations(
+ condDict=selectCond, columns=["TransformationID", "Body", "Author", "AuthorGroup"]
+ )
  if not res["OK"]:
  self.log.error("Failed to get transformations:", res["Message"])
  elif not res["Value"]:
DIRAC/TransformationSystem/Agent/TransformationCleaningAgent.py

@@ -144,7 +144,8 @@ class TransformationCleaningAgent(AgentModule):

  # Obtain the transformations in Cleaning status and remove any mention of the jobs/files
  res = self.transClient.getTransformations(
- {"Status": TransformationStatus.CLEANING, "Type": self.transformationTypes}
+ {"Status": TransformationStatus.CLEANING, "Type": self.transformationTypes},
+ columns=["TransformationID", "Author", "AuthorGroup", "Type"],
  )
  if res["OK"]:
  for transDict in res["Value"]:
@@ -161,7 +162,10 @@
  self.log.error("Failed to get transformations", res["Message"])

  # Obtain the transformations in RemovingFiles status and removes the output files
- res = self.transClient.getTransformations({"Status": "RemovingFiles", "Type": self.transformationTypes})
+ res = self.transClient.getTransformations(
+ {"Status": "RemovingFiles", "Type": self.transformationTypes},
+ columns=["TransformationID", "Author", "AuthorGroup"],
+ )
  if res["OK"]:
  for transDict in res["Value"]:
  if self.shifterProxy:
@@ -183,6 +187,7 @@
  {"Status": TransformationStatus.COMPLETED, "Type": self.transformationTypes},
  older=olderThanTime,
  timeStamp="LastUpdate",
+ columns=["TransformationID", "Author", "AuthorGroup"],
  )
  if res["OK"]:
  for transDict in res["Value"]:
@@ -230,7 +235,10 @@
  return res
  transformationIDs = res["Value"]
  if transformationIDs:
- res = self.transClient.getTransformations({"TransformationID": transformationIDs})
+ res = self.transClient.getTransformations(
+ {"TransformationID": transformationIDs},
+ columns=["TransformationID", "Status", "Author", "AuthorGroup", "Type"],
+ )
  if not res["OK"]:
  self.log.error("Failed to get transformations", res["Message"])
  return res
DIRAC/TransformationSystem/Agent/ValidateOutputDataAgent.py

@@ -76,7 +76,9 @@ class ValidateOutputDataAgent(AgentModule):
  self.updateWaitingIntegrity()
  gLogger.info("-" * 40)

- res = self.transClient.getTransformations({"Status": "ValidatingOutput", "Type": self.transformationTypes})
+ res = self.transClient.getTransformations(
+ {"Status": "ValidatingOutput", "Type": self.transformationTypes}, columns=["TransformationID"]
+ )
  if not res["OK"]:
  gLogger.error("Failed to get ValidatingOutput transformations", res["Message"])
  return res
@@ -98,7 +100,7 @@
  def updateWaitingIntegrity(self):
  """Get 'WaitingIntegrity' transformations, update to 'ValidatedOutput'"""
  gLogger.info("Looking for transformations in the WaitingIntegrity status to update")
- res = self.transClient.getTransformations({"Status": "WaitingIntegrity"})
+ res = self.transClient.getTransformations({"Status": "WaitingIntegrity"}, columns=["TransformationID"])
  if not res["OK"]:
  gLogger.error("Failed to get WaitingIntegrity transformations", res["Message"])
  return res
DIRAC/TransformationSystem/Client/TransformationClient.py

@@ -114,13 +114,21 @@ class TransformationClient(Client):
  newer=None,
  timeStamp=None,
  orderAttribute=None,
- limit=100,
+ limit=None,
  extraParams=False,
  columns=None,
  ):
  """gets all the transformations in the system, incrementally. "limit" here is just used to determine the offset."""
  rpcClient = self._getRPC()

+ # If the body is requested (or is served by default)
+ # we take smaller chunk, not to take too much memory
+ # on the server
+ if columns and "Body" not in columns:
+ limit = 100_000
+ else:
+ limit = 1_000
+
  transformations = []
  if condDict is None:
  condDict = {}
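
Note: with limit now chosen internally, callers mostly pass a condition dictionary and, as the agent changes above show, an explicit columns list so the server does not ship the potentially large Body field. A hedged usage sketch (assuming the client can be constructed with defaults):

    from DIRAC.TransformationSystem.Client.TransformationClient import TransformationClient

    transClient = TransformationClient()

    # Ask only for the columns the caller actually needs; omitting "Body"
    # lets the client fetch in much larger chunks (100_000 vs 1_000 rows).
    res = transClient.getTransformations(
        {"Status": "Active"},
        columns=["TransformationID", "Author", "AuthorGroup"],
    )
    if res["OK"]:
        for transDict in res["Value"]:
            print(transDict["TransformationID"])
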
DIRAC/TransformationSystem/DB/TransformationDB.py

@@ -6,6 +6,9 @@ This class is typically used as a base class for more specific data processing
  databases
  """

+ # Disable it because pylint does not understand decorator (convertToReturnValue)
+
+ # pylint: disable=invalid-sequence-index
  import re
  import time
  import threading
@@ -15,6 +18,7 @@ from errno import ENOENT
  from DIRAC import gLogger, S_OK, S_ERROR
  from DIRAC.Core.Base.DB import DB
  from DIRAC.Core.Utilities.DErrno import cmpError
+ from DIRAC.Core.Utilities.ReturnValues import convertToReturnValue, returnValueOrRaise
  from DIRAC.Resources.Catalog.FileCatalog import FileCatalog
  from DIRAC.Core.Security.ProxyInfo import getProxyInfo
  from DIRAC.Core.Utilities.List import stringListToString, intListToString, breakListIntoChunks
@@ -25,6 +29,7 @@ from DIRAC.DataManagementSystem.Client.MetaQuery import MetaQuery

  MAX_ERROR_COUNT = 10

+ TMP_TABLE_JOIN_LIMIT = 100
  #############################################################################


@@ -270,6 +275,7 @@ class TransformationDB(DB):
  self.__updateTransformationLogging(transID, message, author, connection=connection)
  return S_OK(transID)

+ @convertToReturnValue
  def getTransformations(
  self,
  condDict=None,
@@ -289,32 +295,54 @@
  columns = self.TRANSPARAMS
  else:
  columns = [c for c in columns if c in self.TRANSPARAMS]
- req = "SELECT {} FROM Transformations {}".format(
- intListToString(columns),
- self.buildCondition(condDict, older, newer, timeStamp, orderAttribute, limit, offset=offset),
- )
- res = self._query(req, conn=connection)
- if not res["OK"]:
- return res
- if condDict is None:
- condDict = {}
- webList = []
+
+ join_query = ""
+
+ try:
+ # If we request multiple TransformationIDs, and they are more than TMP_TABLE_JOIN_LIMIT,
+ # we create a temporary table to speed up the query
+ if (
+ "TransformationID" in condDict
+ and isinstance(condDict["TransformationID"], list)
+ and len(condDict["TransformationID"]) > TMP_TABLE_JOIN_LIMIT
+ ):
+ # Create temporary table for TransformationIDs
+ transIDs = condDict.pop("TransformationID")
+ sqlCmd = "CREATE TEMPORARY TABLE to_query_TransformationIDs (TransID INTEGER NOT NULL, PRIMARY KEY (TransID)) ENGINE=MEMORY;"
+ returnValueOrRaise(self._update(sqlCmd, conn=connection))
+ join_query = " JOIN to_query_TransformationIDs t ON TransformationID = t.TransID"
+
+ # Insert TransformationIDs into temporary table
+ sqlCmd = "INSERT INTO to_query_TransformationIDs (TransID) VALUES ( %s )"
+ returnValueOrRaise(self._updatemany(sqlCmd, [(transID,) for transID in transIDs], conn=connection))
+
+ req = "SELECT {} FROM Transformations {} {}".format(
+ intListToString(columns),
+ join_query,
+ self.buildCondition(condDict, older, newer, timeStamp, orderAttribute, limit, offset=offset),
+ )
+ matching_transformations = returnValueOrRaise(self._query(req, conn=connection))
+
+ finally:
+ # Clean up temporary table
+ if join_query:
+ sqlCmd = "DROP TEMPORARY TABLE to_query_TransformationIDs"
+ self._update(sqlCmd, conn=connection)
+
+ # TODO: optimize by getting all the extra params at once
  resultList = []
- for row in res["Value"]:
+ for row in matching_transformations:
  # Prepare the structure for the web
- rList = [str(item) if not isinstance(item, int) else item for item in row]
  transDict = dict(zip(columns, row))
- webList.append(rList)
  if extraParams:
- res = self.__getAdditionalParameters(transDict["TransformationID"], connection=connection)
- if not res["OK"]:
- return res
- transDict.update(res["Value"])
+ trans_extra_param = returnValueOrRaise(
+ self.__getAdditionalParameters(transDict["TransformationID"], connection=connection)
+ )
+
+ transDict.update(trans_extra_param)
  resultList.append(transDict)
- result = S_OK(resultList)
- result["Records"] = webList
- result["ParameterNames"] = columns
- return result
+
+ return resultList

  def getTransformation(self, transName, extraParams=False, connection=False):
  """Get Transformation definition and parameters of Transformation identified by TransformationID"""
@@ -710,21 +738,38 @@
  countDict["Total"] = sum(countDict.values())
  return S_OK(countDict)

+ @convertToReturnValue
  def __addFilesToTransformation(self, transID, fileIDs, connection=False):
- req = "SELECT FileID from TransformationFiles"
- req = req + " WHERE TransformationID = %d AND FileID IN (%s);" % (transID, intListToString(fileIDs))
- res = self._query(req, conn=connection)
- if not res["OK"]:
- return res
- for tupleIn in res["Value"]:
- fileIDs.remove(tupleIn[0])
- if not fileIDs:
- return S_OK([])
- values = [(transID, fileID) for fileID in fileIDs]
- req = "INSERT INTO TransformationFiles (TransformationID,FileID,LastUpdate,InsertedTime) VALUES (%s, %s, UTC_TIMESTAMP(), UTC_TIMESTAMP())"
- if not (res := self._updatemany(req, values, conn=connection))["OK"]:
- return res
- return S_OK(fileIDs)
+ # Create temporary table for FileIDs
+ sqlCmd = "CREATE TEMPORARY TABLE to_query_FileIDs (FileID INT(11) UNSIGNED NOT NULL, PRIMARY KEY (FileID)) ENGINE=MEMORY;"
+ returnValueOrRaise(self._update(sqlCmd, conn=connection))
+
+ try:
+ # Insert FileIDs into temporary table
+ sqlCmd = "INSERT INTO to_query_FileIDs (FileID) VALUES ( %s )"
+ returnValueOrRaise(self._updatemany(sqlCmd, [(fileID,) for fileID in fileIDs], conn=connection))
+
+ # Query existing files using JOIN
+ req = (
+ "SELECT tf.FileID FROM TransformationFiles tf JOIN to_query_FileIDs t ON tf.FileID = t.FileID WHERE tf.TransformationID = %d;"
+ % transID
+ )
+ res = returnValueOrRaise(self._query(req, conn=connection))
+
+ # Remove already existing fileIDs using set difference for efficiency
+ existingFileIDs = {tupleIn[0] for tupleIn in res}
+ fileIDs = list(set(fileIDs) - existingFileIDs)
+ if not fileIDs:
+ return []
+
+ values = [(transID, fileID) for fileID in fileIDs]
+ req = "INSERT INTO TransformationFiles (TransformationID,FileID,LastUpdate,InsertedTime) VALUES (%s, %s, UTC_TIMESTAMP(), UTC_TIMESTAMP())"
+ returnValueOrRaise(self._updatemany(req, values, conn=connection))
+ return fileIDs
+ finally:
+ # Clean up temporary table
+ sqlCmd = "DROP TEMPORARY TABLE to_query_FileIDs"
+ returnValueOrRaise(self._update(sqlCmd, conn=connection))

  def __insertExistingTransformationFiles(self, transID, fileTuplesList, connection=False):
  """Inserting already transformation files in TransformationFiles table (e.g. for deriving transformations)"""
@@ -1271,18 +1316,35 @@
  # These methods manipulate the DataFiles table
  #

+ @convertToReturnValue
  def __getFileIDsForLfns(self, lfns, connection=False):
  """Get file IDs for the given list of lfns
  warning: if the file is not present, we'll see no errors
  """
- req = f"SELECT LFN,FileID FROM DataFiles WHERE LFN in ({stringListToString(lfns)});"
- res = self._query(req, conn=connection)
- if not res["OK"]:
- return res
- lfns = dict(res["Value"])
- # Reverse dictionary
- fids = {fileID: lfn for lfn, fileID in lfns.items()}
- return S_OK((fids, lfns))
+
+ if not lfns:
+ return ({}, {})
+ # Create temporary table for LFNs
+ sqlCmd = "CREATE TEMPORARY TABLE to_query_LFNs (LFN VARCHAR(255) NOT NULL, PRIMARY KEY (LFN)) ENGINE=MEMORY;"
+ returnValueOrRaise(self._update(sqlCmd, conn=connection))
+
+ try:
+ # Insert LFNs into temporary table
+ sqlCmd = "INSERT INTO to_query_LFNs (LFN) VALUES ( %s )"
+ returnValueOrRaise(self._updatemany(sqlCmd, [(lfn,) for lfn in lfns], conn=connection))
+
+ # Query using JOIN with temporary table
+ req = "SELECT df.LFN, df.FileID FROM DataFiles df JOIN to_query_LFNs t ON df.LFN = t.LFN;"
+ res = returnValueOrRaise(self._query(req, conn=connection))
+
+ lfns = dict(res)
+ # Reverse dictionary
+ fids = {fileID: lfn for lfn, fileID in lfns.items()}
+ return (fids, lfns)
+ finally:
+ # Clean up temporary table
+ sqlCmd = "DROP TEMPORARY TABLE to_query_LFNs"
+ self._update(sqlCmd, conn=connection)

  def __getLfnsForFileIDs(self, fileIDs, connection=False):
  """Get lfns for the given list of fileIDs"""
DIRAC/WorkloadManagementSystem/JobWrapper/Watchdog.py

@@ -1,22 +1,22 @@
- """ The Watchdog class is used by the Job Wrapper to resolve and monitor
- the system resource consumption. The Watchdog can determine if
- a running job is stalled and indicate this to the Job Wrapper.
- Furthermore, the Watchdog will identify when the Job CPU limit has been
- exceeded and fail jobs meaningfully.
-
- Information is returned to the WMS via the heart-beat mechanism. This
- also interprets control signals from the WMS e.g. to kill a running
- job.
-
- - Still to implement:
- - CPU normalization for correct comparison with job limit
+ """The Watchdog class is used by the Job Wrapper to resolve and monitor
+ the system resource consumption. The Watchdog can determine if
+ a running job is stalled and indicate this to the Job Wrapper.
+ Furthermore, the Watchdog will identify when the Job CPU limit has been
+ exceeded and fail jobs meaningfully.
+
+ Information is returned to the WMS via the heart-beat mechanism. This
+ also interprets control signals from the WMS e.g. to kill a running
+ job.
+
+ - Still to implement:
+ - CPU normalization for correct comparison with job limit
  """
+
  import datetime
  import errno
  import getpass
  import math
  import os
- import signal
  import socket
  import time
  from pathlib import Path
@@ -32,28 +32,6 @@ from DIRAC.WorkloadManagementSystem.Client import JobMinorStatus
  from DIRAC.WorkloadManagementSystem.Client.JobStateUpdateClient import JobStateUpdateClient


- def kill_proc_tree(pid, sig=signal.SIGTERM, includeParent=True):
- """Kill a process tree (including grandchildren) with signal
- "sig" and return a (gone, still_alive) tuple.
- called as soon as a child terminates.
-
- Taken from https://psutil.readthedocs.io/en/latest/index.html#kill-process-tree
- """
- assert pid != os.getpid(), "won't kill myself"
- parent = psutil.Process(pid)
- children = parent.children(recursive=True)
- if includeParent:
- children.append(parent)
- for p in children:
- try:
- p.send_signal(sig)
- except psutil.NoSuchProcess:
- pass
- _gone, alive = psutil.wait_procs(children, timeout=10)
- for p in alive:
- p.kill()
-
-
  class Watchdog:
  #############################################################################
  def __init__(self, pid, exeThread, spObject, jobCPUTime, memoryLimit=0, processors=1, jobArgs={}):
@@ -212,7 +190,7 @@ class Watchdog:
  if self.littleTimeLeftCount == 0 and self.__timeLeft() == -1:
  self.checkError = JobMinorStatus.JOB_EXCEEDED_CPU
  self.log.error(self.checkError, self.timeLeft)
- self.__killRunningThread()
+ self.spObject.killChild()
  return S_OK()

  self.littleTimeLeftCount -= 1
@@ -321,7 +299,7 @@

  self.log.info("=================END=================")

- self.__killRunningThread()
+ self.spObject.killChild()
  return S_OK()

  recentStdOut = "None"
@@ -408,7 +386,7 @@
  if "Kill" in signalDict:
  self.log.info("Received Kill signal, stopping job via control signal")
  self.checkError = JobMinorStatus.RECEIVED_KILL_SIGNAL
- self.__killRunningThread()
+ self.spObject.killChild()
  else:
  self.log.info("The following control signal was sent but not understood by the watchdog:")
  self.log.info(signalDict)
@@ -862,13 +840,6 @@

  return result

- #############################################################################
- def __killRunningThread(self):
- """Will kill the running thread process and any child processes."""
- self.log.info("Sending kill signal to application PID", self.spObject.getChildPID())
- self.spObject.killChild()
- return S_OK("Thread killed")
-
  #############################################################################
  def __sendSignOfLife(self, jobID, heartBeatDict, staticParamDict):
  """Sends sign of life 'heartbeat' signal and triggers control signal
DIRAC/WorkloadManagementSystem/JobWrapper/test/Test_JobWrapper.py

@@ -1,5 +1,5 @@
- """ Test class for JobWrapper
- """
+ """Test class for JobWrapper"""
+
  import os
  import shutil
  import tempfile
@@ -314,8 +314,7 @@ def test_processKilledSubprocess(mocker):
  result = jw.process("sleep 20", {})

  assert result["OK"]
- assert result["Value"]["payloadStatus"] == 15 # SIGTERM
- assert not result["Value"]["payloadOutput"]
+ assert result["Value"]["payloadStatus"] is None
  assert not result["Value"]["payloadExecutorError"]
  assert result["Value"]["watchdogError"] == "Job is stalled!" # Error message from the watchdog

DIRAC/WorkloadManagementSystem/Utilities/PilotCStoJSONSynchronizer.py

@@ -218,7 +218,7 @@ class PilotCStoJSONSynchronizer:

  preferredURLPatterns = gConfigurationData.extractOptionFromCFG("/DIRAC/PreferredURLPatterns")
  if preferredURLPatterns:
- pilotDict["PreferredURLPatterns"] = preferredURLPatterns
+ pilotDict["PreferredURLPatterns"] = preferredURLPatterns.replace(" ", "").split(",")

  self.log.debug("Got pilotDict", str(pilotDict))

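Note: the change above normalises the CS option from a comma-separated string into a list before it is written to the pilot JSON. For example (values purely illustrative):

    preferredURLPatterns = "https://server-a*, https://server-b*"
    patterns = preferredURLPatterns.replace(" ", "").split(",")
    # -> ["https://server-a*", "https://server-b*"]
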
METADATA

@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: DIRAC
- Version: 9.0.8
+ Version: 9.0.10
  Summary: DIRAC is an interware, meaning a software framework for distributed computing.
  Home-page: https://github.com/DIRACGrid/DIRAC/
  License: GPL-3.0-only
@@ -19,7 +19,7 @@ Requires-Dist: cachetools
  Requires-Dist: certifi
  Requires-Dist: cwltool
  Requires-Dist: diraccfg
- Requires-Dist: DIRACCommon==v9.0.8
+ Requires-Dist: DIRACCommon==v9.0.10
  Requires-Dist: diracx-client>=v0.0.1
  Requires-Dist: diracx-core>=v0.0.1
  Requires-Dist: diracx-cli>=v0.0.1
RECORD

@@ -231,7 +231,7 @@ DIRAC/Core/Utilities/Devloader.py,sha256=nPEinBId-NzyG1zcjcnUQOO-pWmNJsmWkj-Xkt5
  DIRAC/Core/Utilities/DictCache.py,sha256=8Yo5RJnTKwL8NO960hnbNgrzGCnd-Iupk6JExoFFYxQ,12661
  DIRAC/Core/Utilities/Dictionaries.py,sha256=yYxhB0lycfU2rDxgvviqqPpEsaVxNPNS8ZlulHG1LLw,549
  DIRAC/Core/Utilities/DirectoryExplorer.py,sha256=t8__3M_1zparfvKbB_fR8hIB56mZtmBPgklm5FTxYFA,1212
- DIRAC/Core/Utilities/ElasticSearchDB.py,sha256=CxYdPsqCEpe3QOHRK5jiXIlfVj9K74LbTr-QW7jl49E,22731
+ DIRAC/Core/Utilities/ElasticSearchDB.py,sha256=Kr4hmm5PbsGpEdeQ1GgjkRM9m7evCB9yR4BNgWxCiMQ,22735
  DIRAC/Core/Utilities/EventDispatcher.py,sha256=wL2jA2gFRrzMPORJ10WEQloxZPNdcfDjGHq8ENDJ86Y,3142
  DIRAC/Core/Utilities/ExecutorDispatcher.py,sha256=npBVxBxKNmSDklJIfUMv-eadHO3oHbaASGoFZp54A0o,28893
  DIRAC/Core/Utilities/Extensions.py,sha256=k0kgUjc50oFZ1e2XwOERJLQMSSCbnx3HwBO_xFTffSU,7535
@@ -259,7 +259,7 @@ DIRAC/Core/Utilities/ReturnValues.py,sha256=EHtnn1I3Lvq6RO9tTVZMlf19a8QGX1B6Qa7l
  DIRAC/Core/Utilities/Shifter.py,sha256=EkJGPR_zbL0SS3E2_cq1-MjkTH91ZaFJlh-tQ2PxDGk,2571
  DIRAC/Core/Utilities/SiteSEMapping.py,sha256=Uynx_evXQPmfbMKH-TfqzLq4e_7APVrP6gC20cJEBYs,5683
  DIRAC/Core/Utilities/StateMachine.py,sha256=e1rc0X6J8Cf_W9HlLnLgroLkdrksUQUxdkH_qSoZVoA,769
- DIRAC/Core/Utilities/Subprocess.py,sha256=FcdEKKEySh_62S8VLjWp_HnuFYkKAjqsS8FLpPMS1b8,22756
+ DIRAC/Core/Utilities/Subprocess.py,sha256=_lNp7ncg6BYC2TLvkcqACelZSsb0_5oyM2Jq-T5YbFc,23315
  DIRAC/Core/Utilities/ThreadPool.py,sha256=tETtgUdW1dlq_7xp_nQUTxNC87jAkrNnwvuDpGS3ffE,11484
  DIRAC/Core/Utilities/ThreadSafe.py,sha256=EAxEMqEuYlzDvAaupW3fi2MY-3kGZJ0zbK9i7JhEmt4,1117
  DIRAC/Core/Utilities/ThreadScheduler.py,sha256=IzyS59NF0ZhQwGYLpZ-yMX8_6kXFSGL9FQapylnoPLE,6212
@@ -304,7 +304,7 @@ DIRAC/Core/Utilities/test/Test_Pfn.py,sha256=XWTXejQf_TnicaPbQ4ZyDsl1KXDpq3IMNFl
  DIRAC/Core/Utilities/test/Test_ProcessPool.py,sha256=NnfHNkhTOgADk-P9ds_ADuvcSNlK_XdoibRkk13r_NE,10890
  DIRAC/Core/Utilities/test/Test_Profiler.py,sha256=8QRRXm-PwJpjLGwAOJxnrpnnMbmVzXhJ1ZcQ9gDBX5c,4215
  DIRAC/Core/Utilities/test/Test_ReturnValues.py,sha256=w6Jz-Vblgu8RDXPzVi6BZjuFXcSmW-Zb0mcBgW1RWOw,1926
- DIRAC/Core/Utilities/test/Test_Subprocess.py,sha256=nAiVF5oMnle-4E8ijlWF-ilBfgN2VGkkFQQSKhIKN2o,2141
+ DIRAC/Core/Utilities/test/Test_Subprocess.py,sha256=f9THTKnVdGcNwpZ0PtfCQz0UAtNmF06126-F0mUAxWQ,3605
  DIRAC/Core/Utilities/test/Test_entrypoints.py,sha256=z_3f7m59v3W6ZsqgeCFbJoBnMY-cKR_blKbPqcV7528,413
  DIRAC/Core/Utilities/test/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  DIRAC/Core/Workflow/Module.py,sha256=XF_iDyJ1wIwUesHLfDGz9BywHgEuOwqzC2f7fY3E7r4,12631
@@ -361,7 +361,7 @@ DIRAC/DataManagementSystem/Agent/RequestOperations/test/Test_ReplicateAndRegiste
  DIRAC/DataManagementSystem/Agent/RequestOperations/test/Test_RequestOperations.py,sha256=3XZCrFdp7Yk5oY9xlaL2itErA9xmNnCJgIIZ38FOiK0,8292
  DIRAC/DataManagementSystem/Client/ConsistencyInspector.py,sha256=dtKNfsVNud9x7aVzr-RI1jQZCqbEqgmaHmE5lxm7Qwo,31227
  DIRAC/DataManagementSystem/Client/DataIntegrityClient.py,sha256=2ug49tvNzEp1GO6nzcW42E43qPXnaJF-mpIVp1TkWFQ,9130
- DIRAC/DataManagementSystem/Client/DataManager.py,sha256=edLmRUHJtZEYAMyen3y9qwdG9XDl9-wc08qZh5pTPDA,82313
+ DIRAC/DataManagementSystem/Client/DataManager.py,sha256=ME5a_0csev0X07PVXQV14BrDrvCfOhPWCWAwlcbrK60,82313
  DIRAC/DataManagementSystem/Client/DirectoryListing.py,sha256=CvQZ5Da5WhgwdDL8iLk0xVBjKZrBHTdwMKWwSJwzwEI,7582
  DIRAC/DataManagementSystem/Client/FTS3Client.py,sha256=Kk1wy4YnkYuOI6IltyDYZqkzxV7Zsf7ZuoKT12_VbTw,3531
  DIRAC/DataManagementSystem/Client/FTS3File.py,sha256=gZFN_Uxc2tblUCETb4qWPJ2M1tSmKUvTMiZUL-yJ6M0,3122
@@ -604,7 +604,7 @@ DIRAC/Interfaces/API/test/testWF.jdl,sha256=byby75rpCSYh2FuSqqMP1YALbEcpo7ix6U4x
  DIRAC/Interfaces/API/test/testWF.xml,sha256=PIUoxu00j3OAB8Wcx4Uf_vPpxniO6jMUJgEib9IkeIQ,4452
  DIRAC/Interfaces/API/test/testWFSIO.jdl,sha256=dNZbBaYcz7wnzxrSML7DvpQYxO7kqUd5FmmIj6ijV5U,420
  DIRAC/Interfaces/Utilities/DCommands.py,sha256=qcto59gNjD_Mg2bWYqorxgv3S2K91S5f0dLGxxkEZXc,21265
- DIRAC/Interfaces/Utilities/DConfigCache.py,sha256=nUPaybZgVOBSWwjrW2vYWG-PfHtm0sEi46zFqIGnz8A,2566
+ DIRAC/Interfaces/Utilities/DConfigCache.py,sha256=tDiRio3wyvGRRKAJj-4BL8ZbYBhoW5q8SKJ928Uu3uE,2684
  DIRAC/Interfaces/scripts/__init__.py,sha256=bqdOR5-0t1MOV62P-oehaZd_d9YjGSWVTfW-gRXMqLg,44
  DIRAC/Interfaces/scripts/dcd.py,sha256=b5Yoc2amdnv-Aotujir4XlFrVVY7ASaxLzRAU02j63Y,1317
  DIRAC/Interfaces/scripts/dchgrp.py,sha256=0pFrWpppC9ktPlXRqcSSN_23tQR1gdB9JrTKBdQw2_Y,1692
@@ -1047,15 +1047,15 @@ DIRAC/StorageManagementSystem/scripts/dirac_stager_stage_files.py,sha256=zgOaJ6P
  DIRAC/TransformationSystem/ConfigTemplate.cfg,sha256=qm3MaoyX_zjhDGFG4NcAFBKLniWBaPcSkEZCKebYeh4,6730
  DIRAC/TransformationSystem/__init__.py,sha256=KTlQaDBvpqTf0WxwbYIOSppNJ34WZkbaTG1XBWEBvN0,43
  DIRAC/TransformationSystem/Agent/DataRecoveryAgent.py,sha256=JUfOBQd0U2uz9WvvArBRurgxyoVlBbg3rL5ZgdYs0Vw,26982
- DIRAC/TransformationSystem/Agent/InputDataAgent.py,sha256=YdxfB1wb55MBgZAuWNuqx_zXHaxTAEGCKYikr0eZaD8,8085
- DIRAC/TransformationSystem/Agent/MCExtensionAgent.py,sha256=0pmfiy8FTH670Vv3Md6TPo5iAXsu9y1e-1ybIOTQJPY,5266
+ DIRAC/TransformationSystem/Agent/InputDataAgent.py,sha256=XytU-0aq0RJZhNNXxzvvn-AXsRBjt20Q3miFl5LBbYo,8177
+ DIRAC/TransformationSystem/Agent/MCExtensionAgent.py,sha256=KXBgn_5r7oVIOX0d7gx5ok__sCxk4esJ8dX7hivS7C4,5338
  DIRAC/TransformationSystem/Agent/RequestTaskAgent.py,sha256=0FxuRVONDkC6DtDqfSC4LeZmheaevBsrh4yvan0DtVc,2689
- DIRAC/TransformationSystem/Agent/TaskManagerAgentBase.py,sha256=v61j-e_qMQXACENK0JYWa7lMgL0-xNT9irzEdXDHs-8,30652
+ DIRAC/TransformationSystem/Agent/TaskManagerAgentBase.py,sha256=I1DGxItHZ_nYs1_cGZTTrsts01rQl2_JuBHtYTljTGw,30737
  DIRAC/TransformationSystem/Agent/TransformationAgent.py,sha256=rUPyfX_Ip3ZUyBrmVBrYiddVmgeTx6gKiYPEzwUDOcY,33813
  DIRAC/TransformationSystem/Agent/TransformationAgentsUtilities.py,sha256=XrgrjQWSDQm0uxG-wyEY-W0A_MBJyIE-22TafqrLP3A,2805
- DIRAC/TransformationSystem/Agent/TransformationCleaningAgent.py,sha256=uAu_aQxko4GF_cTxDxY4KmARqjuytBO89e4XVy5PPUM,32898
+ DIRAC/TransformationSystem/Agent/TransformationCleaningAgent.py,sha256=FyEOWtp8_ZIMCEh1RYqLOENnB--unmKy6gmZkgLb7cM,33251
  DIRAC/TransformationSystem/Agent/TransformationPlugin.py,sha256=EpWil1Bla9xdrJkhj61oUFDpGzyWfC8acW3tjBh5JpQ,11322
- DIRAC/TransformationSystem/Agent/ValidateOutputDataAgent.py,sha256=tsE0yE2bqSKwdjJml3Mros-tMOfpSDr9Bt8R3MTeqRo,10108
+ DIRAC/TransformationSystem/Agent/ValidateOutputDataAgent.py,sha256=YzjUVECnWYvJ2zQyqMqRa4BZLqq2FUulXnI4tO6HlpU,10190
  DIRAC/TransformationSystem/Agent/WorkflowTaskAgent.py,sha256=UBNVVx0ZgiFnzAPCNC1dSuJXKkt2pPfpSqhy0CREFlg,1741
  DIRAC/TransformationSystem/Agent/__init__.py,sha256=g5D8rYLJboNfnLrH_MZmfyvrXhp0pZBjBTtmr9ZJBQw,49
  DIRAC/TransformationSystem/Agent/RequestOperations/SetFileStatus.py,sha256=asFCK8oxR-p4AMEz6XdFBqft72xUzpqypBN57l_zOEc,1857
@@ -1069,7 +1069,7 @@ DIRAC/TransformationSystem/Client/TaskManager.py,sha256=lh3AsNh7Q-YXf7WCoywNAo1X
  DIRAC/TransformationSystem/Client/TaskManagerPlugin.py,sha256=mCTiFmqs7IpbDcF9wzfoRSMeuWMsxI4LcZr_9u2vr-c,5528
  DIRAC/TransformationSystem/Client/Transformation.py,sha256=Iw8Q4X8ZAUe4HlXAb8TKRvg4vVVnqoaINTAP_b2v7pI,26539
  DIRAC/TransformationSystem/Client/TransformationCLI.py,sha256=EFAbksFDLjX92KcT6VZlriXwFBLjkkxWd0qVWEJPfjU,23972
- DIRAC/TransformationSystem/Client/TransformationClient.py,sha256=6NtwGALJIe7XCrk1XC1HtGCN0IcqeEAL6fiMv1S2rt8,26405
+ DIRAC/TransformationSystem/Client/TransformationClient.py,sha256=9fTV7qvhyaiWCeR-faTpjPy4Vqw4wPuDtr7oFUECJmg,26667
  DIRAC/TransformationSystem/Client/TransformationFilesStatus.py,sha256=3O89wYkPdvYPXnwxyM9x7QLcN6uODEV5yH76LFpmQ80,822
  DIRAC/TransformationSystem/Client/TransformationStatus.py,sha256=f7lzca1m6AfGG09OkA9_bQUthUyojeurS_p02r2oXyQ,1411
  DIRAC/TransformationSystem/Client/Utilities.py,sha256=OO2XBMYbFCpEs961k676WXDtn-tPUm7lQSxilUQhdiA,20369
@@ -1083,7 +1083,7 @@ DIRAC/TransformationSystem/Client/test/Test_Client_TaskManagerPlugin.py,sha256=0
  DIRAC/TransformationSystem/Client/test/Test_Client_TransformationClient.py,sha256=e0O2BZ7xA7uMQDf-68MXxQBRvICiL4KTCcSpSZtpHZc,2577
  DIRAC/TransformationSystem/Client/test/Test_Client_TransformationSystem.py,sha256=tADYLmibPoagAbhM_MToRjy8HHDBcR_6tHU7c3hquJY,13663
  DIRAC/TransformationSystem/Client/test/Test_Client_WorkflowTasks.py,sha256=0BNNF17NduQIiQIbamuQuilY7bklS7trtKzGfMjc_Tk,6128
- DIRAC/TransformationSystem/DB/TransformationDB.py,sha256=QAvO_ZPAV_UySqk7hGmgUGu-faLEm4VjMf_McXazUo0,76520
+ DIRAC/TransformationSystem/DB/TransformationDB.py,sha256=i3cVrpi19nUTD0wZ6JT_S7AUIsUQ3SuutEUYFnhM1Fg,79576
  DIRAC/TransformationSystem/DB/TransformationDB.sql,sha256=JfOfEj0s8q-qh6Nyg6_HrrWsvj3MVFd8zY9O2hf9YEg,6935
  DIRAC/TransformationSystem/DB/__init__.py,sha256=EZ7harwr41I_3b8a_D7j4tHGFrHQYUWYfZqfD6tAEb4,46
  DIRAC/TransformationSystem/Service/TornadoTransformationManagerHandler.py,sha256=yzNjl3kzFJ20zwI-QYtD0e-rila1Nu45IRp_BVVgols,342
@@ -1209,9 +1209,9 @@ DIRAC/WorkloadManagementSystem/JobWrapper/JobWrapper.py,sha256=s_YHg_PsTGVHBLrS1
  DIRAC/WorkloadManagementSystem/JobWrapper/JobWrapperOfflineTemplate.py,sha256=wem5VDN9XiC7szAzdsbgHUxpIOQB2Hj36DIVMoV9px8,2490
  DIRAC/WorkloadManagementSystem/JobWrapper/JobWrapperTemplate.py,sha256=4QgcFPMLRaTagP9e_Vvsla8pFH8HdewklHfS-gyS4-g,3313
  DIRAC/WorkloadManagementSystem/JobWrapper/JobWrapperUtilities.py,sha256=5w_4PMnaHhuexChADDvt1L9Ih1PstdUuYWObnlv9Dto,10072
- DIRAC/WorkloadManagementSystem/JobWrapper/Watchdog.py,sha256=wGpIdnyVzI5T9agxNp94gmPZPceXWREaJiEtZg1lAzk,39997
+ DIRAC/WorkloadManagementSystem/JobWrapper/Watchdog.py,sha256=UWVDC_tWzlE9i5PsLycrRda4j3hWjaK1RnQVOvydg6U,38857
  DIRAC/WorkloadManagementSystem/JobWrapper/__init__.py,sha256=e9Oa_ddNLweR3Lp_HOMK6WqqCWWj2SLPxF5UH4F19ic,61
- DIRAC/WorkloadManagementSystem/JobWrapper/test/Test_JobWrapper.py,sha256=R9onrrnfyc1v4USt2nnvrYHFo5tKbpLjhMkXl4n0y2Y,39177
+ DIRAC/WorkloadManagementSystem/JobWrapper/test/Test_JobWrapper.py,sha256=AhevZTkZfFQV9XN5UTWFmoMe9GvwN_Caorto1o_YoU8,39119
  DIRAC/WorkloadManagementSystem/JobWrapper/test/Test_JobWrapperTemplate.py,sha256=dC_SvC5Rlchlj2NvBfN7FH1ioXgC8bf9U8BQnEL5GYg,21982
  DIRAC/WorkloadManagementSystem/JobWrapper/test/Test_Watchdog.py,sha256=a-QJ1E1ZcWObhOVgxZYD_nYjseCWsbjT0KxjZDNWyAQ,882
  DIRAC/WorkloadManagementSystem/JobWrapper/test/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -1242,7 +1242,7 @@ DIRAC/WorkloadManagementSystem/Utilities/JobModel.py,sha256=jN9sFbzMZo9tab6Kp7Oe
  DIRAC/WorkloadManagementSystem/Utilities/JobParameters.py,sha256=JW3AAEtBJn1gIO_rm2Ft5qqjfLteIo3HpQtGNZBfhxE,8365
  DIRAC/WorkloadManagementSystem/Utilities/JobStatusUtility.py,sha256=WtGJzC7fHvydANh8JH6e1Kk_jebrCMPr2c5cw3ufjm8,7826
  DIRAC/WorkloadManagementSystem/Utilities/ParametricJob.py,sha256=FNUsGhvsFVrtmA7r8G-sd4QTMeBkqG1sdtwiBUKQyd0,605
- DIRAC/WorkloadManagementSystem/Utilities/PilotCStoJSONSynchronizer.py,sha256=Ezpfd90dV_j6fOn25v5gr-wNWh1nMpCOQZfPmGDHJD4,12448
+ DIRAC/WorkloadManagementSystem/Utilities/PilotCStoJSONSynchronizer.py,sha256=ZQk2tD40986awO9pae1zmdPEWlnMJt4m61Z_RU3LWl8,12476
  DIRAC/WorkloadManagementSystem/Utilities/PilotWrapper.py,sha256=VcvQTpeyTbVYqSsPQDyAt37N2CaEAnIuvbR6yk4kYk8,15465
  DIRAC/WorkloadManagementSystem/Utilities/QueueUtilities.py,sha256=J5-n_lvWbW_TRjrlqp8hx1SHEaXDW2Dxp3R1hBBrWnE,12082
  DIRAC/WorkloadManagementSystem/Utilities/RemoteRunner.py,sha256=7FcEtlYSJMzdbLIFBUKD-j_wqRHya-ISqk8w-JRy3kw,12159
@@ -1296,9 +1296,9 @@ DIRAC/tests/Workflow/Integration/exe-script.py,sha256=B_slYdTocEzqfQLRhwuPiLyYUn
  DIRAC/tests/Workflow/Integration/helloWorld.py,sha256=tBgEHH3ZF7ZiTS57gtmm3DW-Qxgm_57HWHpM-Y8XSws,205
  DIRAC/tests/Workflow/Regression/helloWorld.py,sha256=69eCgFuVSYo-mK3Dj2dw1c6g86sF5FksKCf8V2aGVoM,509
  DIRAC/tests/Workflow/Regression/helloWorld.xml,sha256=xwydIcFTAHIX-YPfQfyxuQ7hzvIO3IhR3UAF7ORgkGg,5310
- dirac-9.0.8.dist-info/licenses/LICENSE,sha256=uyr4oV6jmjUeepXZPPjkJRwa5q5MrI7jqJz5sVXNblQ,32452
- dirac-9.0.8.dist-info/METADATA,sha256=gpKRR8YAjDBxrxogcUUGPNXojPku4FhNUVqBVSPRNWg,10016
- dirac-9.0.8.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- dirac-9.0.8.dist-info/entry_points.txt,sha256=hupzIL8aVmjK3nn7RLKdhcaiPmLOiD3Kulh3CSDHKmw,16492
- dirac-9.0.8.dist-info/top_level.txt,sha256=RISrnN9kb_mPqmVu8_o4jF-DSX8-h6AcgfkO9cgfkHA,6
- dirac-9.0.8.dist-info/RECORD,,
+ dirac-9.0.10.dist-info/licenses/LICENSE,sha256=uyr4oV6jmjUeepXZPPjkJRwa5q5MrI7jqJz5sVXNblQ,32452
+ dirac-9.0.10.dist-info/METADATA,sha256=2Pde9NbM-pbmGOY4PXnSp-C9eZYR-YnqbJ8ZmRqnxzQ,10018
+ dirac-9.0.10.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ dirac-9.0.10.dist-info/entry_points.txt,sha256=hupzIL8aVmjK3nn7RLKdhcaiPmLOiD3Kulh3CSDHKmw,16492
+ dirac-9.0.10.dist-info/top_level.txt,sha256=RISrnN9kb_mPqmVu8_o4jF-DSX8-h6AcgfkO9cgfkHA,6
+ dirac-9.0.10.dist-info/RECORD,,