DIRAC 9.0.0a69__py3-none-any.whl → 9.0.0a70__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (84)
  1. DIRAC/AccountingSystem/Client/Types/Network.py +8 -8
  2. DIRAC/AccountingSystem/Client/Types/PilotSubmission.py +3 -3
  3. DIRAC/ConfigurationSystem/Client/CSAPI.py +11 -1
  4. DIRAC/ConfigurationSystem/Client/Helpers/CSGlobals.py +0 -9
  5. DIRAC/ConfigurationSystem/Client/Helpers/Registry.py +3 -29
  6. DIRAC/ConfigurationSystem/Client/SyncPlugins/CERNLDAPSyncPlugin.py +4 -1
  7. DIRAC/ConfigurationSystem/ConfigTemplate.cfg +3 -0
  8. DIRAC/ConfigurationSystem/private/Modificator.py +11 -3
  9. DIRAC/ConfigurationSystem/private/RefresherBase.py +4 -2
  10. DIRAC/Core/DISET/ServiceReactor.py +11 -3
  11. DIRAC/Core/DISET/private/Transports/M2SSLTransport.py +9 -7
  12. DIRAC/Core/Security/DiracX.py +11 -6
  13. DIRAC/Core/Security/test/test_diracx_token_from_pem.py +161 -0
  14. DIRAC/Core/Tornado/Server/TornadoService.py +1 -1
  15. DIRAC/Core/Utilities/ElasticSearchDB.py +1 -2
  16. DIRAC/Core/Utilities/Subprocess.py +66 -57
  17. DIRAC/Core/Utilities/test/Test_Profiler.py +20 -20
  18. DIRAC/Core/Utilities/test/Test_Subprocess.py +58 -8
  19. DIRAC/Core/scripts/dirac_apptainer_exec.py +8 -8
  20. DIRAC/DataManagementSystem/Agent/FTS3Agent.py +8 -7
  21. DIRAC/DataManagementSystem/Client/DataManager.py +6 -7
  22. DIRAC/DataManagementSystem/Client/FTS3Job.py +125 -34
  23. DIRAC/DataManagementSystem/Client/test/Test_FTS3Objects.py +1 -0
  24. DIRAC/DataManagementSystem/Client/test/Test_scitag.py +69 -0
  25. DIRAC/DataManagementSystem/DB/FileCatalogComponents/DatasetManager/DatasetManager.py +1 -1
  26. DIRAC/DataManagementSystem/scripts/dirac_dms_create_moving_request.py +2 -0
  27. DIRAC/FrameworkSystem/DB/InstalledComponentsDB.py +3 -2
  28. DIRAC/FrameworkSystem/DB/ProxyDB.py +9 -5
  29. DIRAC/FrameworkSystem/Utilities/MonitoringUtilities.py +1 -0
  30. DIRAC/FrameworkSystem/Utilities/TokenManagementUtilities.py +3 -2
  31. DIRAC/FrameworkSystem/Utilities/diracx.py +41 -10
  32. DIRAC/FrameworkSystem/scripts/dirac_login.py +2 -2
  33. DIRAC/FrameworkSystem/scripts/dirac_proxy_init.py +1 -1
  34. DIRAC/FrameworkSystem/scripts/dirac_uninstall_component.py +1 -0
  35. DIRAC/Interfaces/API/Dirac.py +3 -6
  36. DIRAC/Interfaces/Utilities/DConfigCache.py +2 -0
  37. DIRAC/MonitoringSystem/DB/MonitoringDB.py +6 -5
  38. DIRAC/MonitoringSystem/Service/WebAppHandler.py +25 -6
  39. DIRAC/MonitoringSystem/private/MainReporter.py +0 -3
  40. DIRAC/RequestManagementSystem/Agent/RequestExecutingAgent.py +8 -6
  41. DIRAC/RequestManagementSystem/ConfigTemplate.cfg +6 -6
  42. DIRAC/ResourceStatusSystem/Command/FreeDiskSpaceCommand.py +3 -1
  43. DIRAC/Resources/Computing/AREXComputingElement.py +18 -2
  44. DIRAC/Resources/Computing/BatchSystems/Condor.py +0 -3
  45. DIRAC/Resources/Computing/BatchSystems/executeBatch.py +15 -7
  46. DIRAC/Resources/Computing/LocalComputingElement.py +0 -2
  47. DIRAC/Resources/Computing/SSHComputingElement.py +61 -38
  48. DIRAC/Resources/IdProvider/CheckInIdProvider.py +13 -0
  49. DIRAC/Resources/IdProvider/IdProviderFactory.py +13 -3
  50. DIRAC/Resources/IdProvider/tests/Test_IdProviderFactory.py +7 -0
  51. DIRAC/Resources/Storage/FileStorage.py +121 -2
  52. DIRAC/TransformationSystem/Agent/InputDataAgent.py +4 -1
  53. DIRAC/TransformationSystem/Agent/MCExtensionAgent.py +5 -2
  54. DIRAC/TransformationSystem/Agent/TaskManagerAgentBase.py +3 -4
  55. DIRAC/TransformationSystem/Agent/TransformationCleaningAgent.py +44 -9
  56. DIRAC/TransformationSystem/Agent/ValidateOutputDataAgent.py +4 -2
  57. DIRAC/TransformationSystem/Client/TransformationClient.py +9 -1
  58. DIRAC/TransformationSystem/Client/Utilities.py +6 -3
  59. DIRAC/TransformationSystem/DB/TransformationDB.py +105 -43
  60. DIRAC/TransformationSystem/Utilities/ReplicationCLIParameters.py +3 -3
  61. DIRAC/TransformationSystem/scripts/dirac_production_runjoblocal.py +2 -4
  62. DIRAC/TransformationSystem/test/Test_replicationTransformation.py +5 -6
  63. DIRAC/WorkloadManagementSystem/Agent/SiteDirector.py +8 -11
  64. DIRAC/WorkloadManagementSystem/Agent/StalledJobAgent.py +39 -7
  65. DIRAC/WorkloadManagementSystem/Agent/test/Test_Agent_SiteDirector.py +8 -2
  66. DIRAC/WorkloadManagementSystem/Agent/test/Test_Agent_StalledJobAgent.py +24 -4
  67. DIRAC/WorkloadManagementSystem/Client/DownloadInputData.py +4 -3
  68. DIRAC/WorkloadManagementSystem/ConfigTemplate.cfg +3 -3
  69. DIRAC/WorkloadManagementSystem/DB/JobParametersDB.py +8 -8
  70. DIRAC/WorkloadManagementSystem/DB/SandboxMetadataDB.py +1 -1
  71. DIRAC/WorkloadManagementSystem/DB/StatusUtils.py +48 -21
  72. DIRAC/WorkloadManagementSystem/DB/tests/Test_StatusUtils.py +19 -4
  73. DIRAC/WorkloadManagementSystem/JobWrapper/JobWrapper.py +3 -4
  74. DIRAC/WorkloadManagementSystem/JobWrapper/Watchdog.py +16 -45
  75. DIRAC/WorkloadManagementSystem/JobWrapper/test/Test_JobWrapper.py +18 -9
  76. DIRAC/WorkloadManagementSystem/Service/JobManagerHandler.py +25 -2
  77. DIRAC/WorkloadManagementSystem/Service/WMSAdministratorHandler.py +18 -31
  78. DIRAC/WorkloadManagementSystem/Utilities/PilotCStoJSONSynchronizer.py +4 -1
  79. {dirac-9.0.0a69.dist-info → dirac-9.0.0a70.dist-info}/METADATA +6 -5
  80. {dirac-9.0.0a69.dist-info → dirac-9.0.0a70.dist-info}/RECORD +84 -82
  81. {dirac-9.0.0a69.dist-info → dirac-9.0.0a70.dist-info}/WHEEL +0 -0
  82. {dirac-9.0.0a69.dist-info → dirac-9.0.0a70.dist-info}/entry_points.txt +0 -0
  83. {dirac-9.0.0a69.dist-info → dirac-9.0.0a70.dist-info}/licenses/LICENSE +0 -0
  84. {dirac-9.0.0a69.dist-info → dirac-9.0.0a70.dist-info}/top_level.txt +0 -0
@@ -1,9 +1,15 @@
1
- """ FTS3Job module containing only the FTS3Job class """
1
+ """FTS3Job module containing only the FTS3Job class"""
2
2
 
3
3
  import datetime
4
4
  import errno
5
+ import os
6
+ import requests
5
7
  from packaging.version import Version
6
8
 
9
+ from cachetools import cachedmethod, LRUCache, TTLCache, cached
10
+ from threading import Lock
11
+ from typing import Optional
12
+
7
13
 
8
14
  # Requires at least version 3.3.3
9
15
  from fts3 import __version__ as fts3_version
@@ -26,8 +32,9 @@ from DIRAC.Resources.Storage.StorageElement import StorageElement
26
32
 
27
33
  from DIRAC.FrameworkSystem.Client.Logger import gLogger
28
34
  from DIRAC.FrameworkSystem.Client.TokenManagerClient import gTokenManager
35
+ from DIRAC.FrameworkSystem.Utilities.TokenManagementUtilities import getIdProviderClient
29
36
 
30
- from DIRAC.Core.Utilities.ReturnValues import S_OK, S_ERROR
37
+ from DIRAC.Core.Utilities.ReturnValues import S_OK, S_ERROR, returnValueOrRaise
31
38
  from DIRAC.Core.Utilities.DErrno import cmpError
32
39
 
33
40
  from DIRAC.Core.Utilities.JEncode import JSerializable
@@ -36,6 +43,54 @@ from DIRAC.DataManagementSystem.Client.FTS3File import FTS3File
36
43
  # 3 days in seconds
37
44
  BRING_ONLINE_TIMEOUT = 259200
38
45
 
46
+ # Number of IdP to keep in cache. Should correspond roughly
47
+ # to the number of groups performing transfers
48
+ IDP_CACHE_SIZE = 8
49
+
50
+
51
+ _scitag_cache = TTLCache(maxsize=10, ttl=3600)
52
+ _scitag_lock = Lock()
53
+ _scitag_json_cache = TTLCache(maxsize=1, ttl=86400)
54
+ _scitag_json_lock = Lock()
55
+
56
+
57
+ @cached(_scitag_cache, lock=_scitag_lock)
58
+ def get_scitag(vo: str, activity: Optional[str] = None) -> int:
59
+ """
60
+ Get the scitag based on the VO and activity.
61
+ If the VO is not found in the scitag.json, it defaults to 1.
62
+ If no specific activity is provided, it defaults to the "default" activityName.
63
+
64
+ :param vo: The VO for which to get the scitag
65
+ :param activity: The activity for which to get the scitag
66
+ :return: The scitag value
67
+ """
68
+
69
+ @cached(_scitag_json_cache, lock=_scitag_json_lock)
70
+ def get_remote_json():
71
+ gLogger.verbose("Fetching https://scitags.org/api.json from the network")
72
+ response = requests.get("https://scitags.org/api.json")
73
+ response.raise_for_status()
74
+ return response.json()
75
+
76
+ vo_id = 1 # Default VO ID
77
+ activity_id = 1 # Default activity ID
78
+
79
+ try:
80
+ # Load the JSON data from the cache or network
81
+ sj = get_remote_json()
82
+
83
+ for experiment in sj.get("experiments", []):
84
+ if experiment.get("expName") == vo.lower():
85
+ vo_id = experiment.get("expId")
86
+ for act in experiment.get("activities", []):
87
+ if act.get("activityName") == activity:
88
+ activity_id = act.get("activityId")
89
+ except Exception as e:
90
+ gLogger.error(f"Error fetching or parsing scitag.json. Using default scitag values.", repr(e))
91
+ # Logic to determine the scitag based on vo and activity (this is what FTS wants)
92
+ return vo_id << 6 | activity_id # Example logic, replace with actual implementation
93
+
39
94
 
40
95
  class FTS3Job(JSerializable):
41
96
  """Abstract class to represent a job to be executed by FTS. It belongs
@@ -78,6 +133,8 @@ class FTS3Job(JSerializable):
78
133
  "userGroup",
79
134
  ]
80
135
 
136
+ _idp_cache = LRUCache(maxsize=IDP_CACHE_SIZE)
137
+
81
138
  def __init__(self):
82
139
  self.submitTime = None
83
140
  self.lastUpdate = None
@@ -111,7 +168,12 @@ class FTS3Job(JSerializable):
111
168
  # temporary used only for accounting
112
169
  # it is set by the monitor method
113
170
  # when a job is in a final state
114
- self.accountingDict = None
171
+ self.accountingDicts = None
172
+
173
+ @classmethod
174
+ @cachedmethod(lambda cls: cls._idp_cache)
175
+ def _getIdpClient(cls, group_name: str):
176
+ return returnValueOrRaise(getIdProviderClient(group_name, None, client_name_prefix="fts"))
115
177
 
116
178
  def monitor(self, context=None, ftsServer=None, ucert=None):
117
179
  """Queries the fts server to monitor the job.
@@ -143,7 +205,6 @@ class FTS3Job(JSerializable):
143
205
 
144
206
  if not self.ftsGUID:
145
207
  return S_ERROR("FTSGUID not set, FTS job not submitted?")
146
-
147
208
  if not context:
148
209
  if not ftsServer:
149
210
  ftsServer = self.ftsServer
@@ -170,13 +231,14 @@ class FTS3Job(JSerializable):
170
231
  self.error = jobStatusDict["reason"]
171
232
 
172
233
  if newStatus in self.FINAL_STATES:
173
- self._fillAccountingDict(jobStatusDict)
234
+ self._fillAccountingDicts(jobStatusDict)
174
235
 
175
236
  filesInfoList = jobStatusDict["files"]
176
237
  filesStatus = {}
177
238
  statusSummary = {}
178
239
 
179
240
  # Make a copy, since we are potentially
241
+
180
242
  # deleting objects
181
243
  for fileDict in list(filesInfoList):
182
244
  file_state = fileDict["file_state"].capitalize()
@@ -231,7 +293,7 @@ class FTS3Job(JSerializable):
231
293
  # so we put this back into the monitoring data such that the accounting is done properly
232
294
  jobStatusDict["files"] = filesInfoList
233
295
  if newStatus in self.FINAL_STATES:
234
- self._fillAccountingDict(jobStatusDict)
296
+ self._fillAccountingDicts(jobStatusDict)
235
297
 
236
298
  total = len(filesInfoList)
237
299
  completed = sum(statusSummary.get(state, 0) for state in FTS3File.FTS_FINAL_STATES)
@@ -456,6 +518,9 @@ class FTS3Job(JSerializable):
456
518
 
457
519
  ftsFileID = getattr(ftsFile, "fileID")
458
520
 
521
+ # scitag 65 is 1 << 6 | 1 (default experiment, default activity)
522
+ scitag = get_scitag(vo=self.vo, activity=self.activity)
523
+
459
524
  # Under normal circumstances, we simply submit an fts transfer as such:
460
525
  # * srcProto://myFile -> destProto://myFile
461
526
  #
@@ -485,6 +550,7 @@ class FTS3Job(JSerializable):
485
550
  filesize=ftsFile.size,
486
551
  metadata=stageTrans_metadata,
487
552
  activity=self.activity,
553
+ scitag=scitag,
488
554
  )
489
555
  transfers.append(stageTrans)
490
556
 
@@ -509,11 +575,10 @@ class FTS3Job(JSerializable):
509
575
  if not res["OK"]:
510
576
  return res
511
577
  srcTokenPath = res["Value"]
512
- res = gTokenManager.getToken(
513
- userGroup=self.userGroup,
514
- requiredTimeLeft=3600,
578
+ res = self._getIdpClient(self.userGroup).fetchToken(
579
+ grant_type="client_credentials",
515
580
  scope=[f"storage.read:/{srcTokenPath}", "offline_access"],
516
- useCache=False,
581
+ # TODO: add a specific audience
517
582
  )
518
583
  if not res["OK"]:
519
584
  return res
@@ -528,11 +593,17 @@ class FTS3Job(JSerializable):
528
593
  if not res["OK"]:
529
594
  return res
530
595
  dstTokenPath = res["Value"]
531
- res = gTokenManager.getToken(
532
- userGroup=self.userGroup,
533
- requiredTimeLeft=3600,
534
- scope=[f"storage.modify:/{dstTokenPath}", f"storage.read:/{dstTokenPath}", "offline_access"],
535
- useCache=False,
596
+ res = self._getIdpClient(self.userGroup).fetchToken(
597
+ grant_type="client_credentials",
598
+ scope=[
599
+ f"storage.modify:/{dstTokenPath}",
600
+ f"storage.read:/{dstTokenPath}",
601
+ # Needed because CNAF
602
+ # https://ggus.eu/index.php?mode=ticket_info&ticket_id=165048
603
+ f"storage.read:/{os.path.dirname(dstTokenPath)}",
604
+ "offline_access",
605
+ ],
606
+ # TODO: add a specific audience
536
607
  )
537
608
  if not res["OK"]:
538
609
  return res
@@ -553,6 +624,7 @@ class FTS3Job(JSerializable):
553
624
  activity=self.activity,
554
625
  source_token=srcToken,
555
626
  destination_token=dstToken,
627
+ scitag=scitag,
556
628
  )
557
629
 
558
630
  transfers.append(trans)
@@ -588,6 +660,7 @@ class FTS3Job(JSerializable):
588
660
  multihop=isMultiHop,
589
661
  metadata=job_metadata,
590
662
  priority=self.priority,
663
+ unmanaged_tokens=True,
591
664
  archive_timeout=archive_timeout,
592
665
  **dest_spacetoken,
593
666
  )
@@ -728,6 +801,7 @@ class FTS3Job(JSerializable):
728
801
  retry=3,
729
802
  metadata=job_metadata,
730
803
  priority=self.priority,
804
+ unmanaged_tokens=True,
731
805
  **dest_spacetoken,
732
806
  )
733
807
 
@@ -882,9 +956,9 @@ class FTS3Job(JSerializable):
882
956
  gLogger.exception("Error generating context", repr(e))
883
957
  return S_ERROR(repr(e))
884
958
 
885
- def _fillAccountingDict(self, jobStatusDict):
886
- """This methods generates the necessary information to create a DataOperation
887
- accounting record, and stores them as a instance attribute.
959
+ def _fillAccountingDicts(self, jobStatusDict):
960
+ """This methods generates the necessary information to create DataOperation
961
+ accounting records, and stores them as a instance attribute.
888
962
 
889
963
  For it to be relevant, it should be called only when the job is in a final state.
890
964
 
@@ -893,6 +967,7 @@ class FTS3Job(JSerializable):
893
967
  :returns: None
894
968
  """
895
969
 
970
+ accountingDicts = []
896
971
  accountingDict = dict()
897
972
  sourceSE = None
898
973
  targetSE = None
@@ -903,16 +978,24 @@ class FTS3Job(JSerializable):
903
978
  accountingDict["Protocol"] = "FTS3"
904
979
  accountingDict["ExecutionSite"] = self.ftsServer
905
980
 
981
+ # Registration values must be set anyway
982
+ accountingDict["RegistrationTime"] = 0.0
983
+ accountingDict["RegistrationOK"] = 0
984
+ accountingDict["RegistrationTotal"] = 0
985
+
906
986
  # We cannot rely on all the transient attributes (like self.filesToSubmit)
907
987
  # because it is probably not filed by the time we monitor !
908
988
 
909
989
  filesInfoList = jobStatusDict["files"]
910
990
  successfulFiles = []
991
+ failedFiles = []
911
992
 
912
993
  for fileDict in filesInfoList:
913
994
  file_state = fileDict["file_state"].capitalize()
914
995
  if file_state in FTS3File.FTS_SUCCESS_STATES:
915
996
  successfulFiles.append(fileDict)
997
+ else:
998
+ failedFiles.append(fileDict)
916
999
 
917
1000
  job_metadata = jobStatusDict["job_metadata"]
918
1001
  # previous version of the code did not have dictionary as
@@ -921,23 +1004,31 @@ class FTS3Job(JSerializable):
921
1004
  sourceSE = job_metadata.get("sourceSE")
922
1005
  targetSE = job_metadata.get("targetSE")
923
1006
 
924
- accountingDict["TransferOK"] = len(successfulFiles)
925
- accountingDict["TransferTotal"] = len(filesInfoList)
926
- # We need this if in the list comprehension because staging only jobs have `None` as filesize
927
- accountingDict["TransferSize"] = sum(
928
- fileDict["filesize"] for fileDict in successfulFiles if fileDict["filesize"]
929
- )
930
- accountingDict["FinalStatus"] = self.status
931
1007
  accountingDict["Source"] = sourceSE
932
1008
  accountingDict["Destination"] = targetSE
933
- # We need this if in the list comprehension because staging only jobs have `None` as tx_duration
934
- accountingDict["TransferTime"] = sum(
935
- int(fileDict["tx_duration"]) for fileDict in successfulFiles if fileDict["tx_duration"]
936
- )
937
1009
 
938
- # Registration values must be set anyway
939
- accountingDict["RegistrationTime"] = 0.0
940
- accountingDict["RegistrationOK"] = 0
941
- accountingDict["RegistrationTotal"] = 0
1010
+ if successfulFiles:
1011
+ successfulDict = accountingDict.copy()
1012
+ successfulDict["TransferOK"] = len(successfulFiles)
1013
+ successfulDict["TransferTotal"] = len(successfulFiles)
1014
+ # We need this if in the list comprehension because staging only jobs have `None` as filesize
1015
+ successfulDict["TransferSize"] = sum(
1016
+ fileDict["filesize"] for fileDict in successfulFiles if fileDict["filesize"]
1017
+ )
1018
+ successfulDict["FinalStatus"] = "Finished"
942
1019
 
943
- self.accountingDict = accountingDict
1020
+ # We need this if in the list comprehension because staging only jobs have `None` as tx_duration
1021
+ successfulDict["TransferTime"] = sum(
1022
+ int(fileDict["tx_duration"]) for fileDict in successfulFiles if fileDict["tx_duration"]
1023
+ )
1024
+ accountingDicts.append(successfulDict)
1025
+ if failedFiles:
1026
+ failedDict = accountingDict.copy()
1027
+ failedDict["TransferOK"] = 0
1028
+ failedDict["TransferTotal"] = len(failedFiles)
1029
+ failedDict["TransferSize"] = 0
1030
+ failedDict["FinalStatus"] = "Failed"
1031
+ failedDict["TransferTime"] = 0
1032
+ accountingDicts.append(failedDict)
1033
+
1034
+ self.accountingDicts = accountingDicts
@@ -204,6 +204,7 @@ def generateFTS3Job(sourceSE, targetSE, lfns, multiHopSE=None):
204
204
  newJob.sourceSE = sourceSE
205
205
  newJob.targetSE = targetSE
206
206
  newJob.multiHopSE = multiHopSE
207
+ newJob.vo = "lhcb"
207
208
  filesToSubmit = []
208
209
 
209
210
  for i, lfn in enumerate(lfns, start=1):
@@ -0,0 +1,69 @@
1
+ from unittest.mock import Mock, patch
2
+
3
+ import pytest
4
+
5
+ from DIRAC.DataManagementSystem.Client.FTS3Job import get_scitag
6
+
7
+
8
+ class TestGetScitag:
9
+ def test_valid_vo_and_activity(self):
10
+ """Test get_scitag with valid VO and activity."""
11
+ result = get_scitag("atlas", "Analysis Input")
12
+ expected = 2 << 6 | 17 # atlas expId=2, analysis activityId=17
13
+ assert result == expected
14
+
15
+ def test_valid_vo_no_activity(self):
16
+ """Test get_scitag with valid VO but no specific activity (should use default)."""
17
+ result = get_scitag("cms")
18
+ expected = 3 << 6 | 1 # cms expId=200, default activityId=1
19
+ assert result == expected
20
+
21
+ def test_invalid_vo(self):
22
+ """Test get_scitag with invalid VO (should use default vo_id=1)."""
23
+ result = get_scitag("nonexistent")
24
+ expected = 1 << 6 | 1 # default vo_id=1, default activity_id=1
25
+ assert result == expected
26
+
27
+ def test_valid_vo_invalid_activity(self):
28
+ """Test get_scitag with valid VO but invalid activity."""
29
+ result = get_scitag("atlas", "nonexistent_activity")
30
+ expected = 2 << 6 | 1 # atlas expId=2, default activity_id=1
31
+ assert result == expected
32
+
33
+ def test_case_insensitive_vo(self):
34
+ """Test that VO matching is case insensitive."""
35
+ result = get_scitag("ATLAS", "Data Brokering")
36
+ expected = 2 << 6 | 3 # atlas expId=2, production activityId=3
37
+ assert result == expected
38
+
39
+
40
+ @pytest.mark.parametrize(
41
+ "vo,activity,expected_vo_id,expected_activity_id",
42
+ [
43
+ ("atlas", "Analysis Output", 2, 18),
44
+ ("atlas", "Debug", 2, 9),
45
+ ("cms", "Cache", 3, 3),
46
+ ("cms", "default", 3, 1),
47
+ ("nonexistent", "any", 1, 1), # defaults
48
+ ("atlas", "nonexistent", 2, 1), # valid vo, invalid activity
49
+ ],
50
+ )
51
+ def test_parametrized_scenarios(vo, activity, expected_vo_id, expected_activity_id):
52
+ """Parametrized test for various VO and activity combinations."""
53
+ result = get_scitag(vo, activity)
54
+ expected = expected_vo_id << 6 | expected_activity_id
55
+ assert result == expected
56
+
57
+
58
+ @pytest.mark.parametrize(
59
+ "vo,expected_result",
60
+ [
61
+ ("atlas", 2 << 6 | 1), # Should use default activity
62
+ ("cms", 3 << 6 | 1), # Should use default activity
63
+ ("unknown", 1 << 6 | 1), # Should use all defaults
64
+ ],
65
+ )
66
+ def test_no_activity_parameter(vo, expected_result):
67
+ """Test behavior when no activity parameter is provided."""
68
+ result = get_scitag(vo)
69
+ assert result == expected_result
@@ -12,7 +12,7 @@ class DatasetManager:
12
12
  _tables["FC_MetaDatasets"] = {
13
13
  "Fields": {
14
14
  "DatasetID": "INT AUTO_INCREMENT",
15
- "DatasetName": "VARCHAR(128) CHARACTER SET utfmb4 COLLATE utf8mb4_bin NOT NULL",
15
+ "DatasetName": "VARCHAR(128) CHARACTER SET utf8mb4 COLLATE utf8mb4_bin NOT NULL",
16
16
  "MetaQuery": "VARCHAR(512)",
17
17
  "DirID": "INT NOT NULL DEFAULT 0",
18
18
  "TotalSize": "BIGINT UNSIGNED NOT NULL",
@@ -41,6 +41,7 @@ class CreateMovingRequest:
41
41
  self.flags = [
42
42
  ("C", "CheckMigration", "Ensure the LFNs are migrated to tape before removing any replicas"),
43
43
  ("X", "Execute", "Put Requests, else dryrun"),
44
+ ("", "SourceOnly", "Only treat files that are already at the Source-SE"),
44
45
  ]
45
46
  self.registerSwitchesAndParseCommandLine()
46
47
  self.getLFNList()
@@ -208,6 +209,7 @@ class CreateMovingRequest:
208
209
 
209
210
  replicate = Operation()
210
211
  replicate.Type = "ReplicateAndRegister"
212
+ replicate.SourceSE = ",".join(self.switches.get("SourceSE", []))
211
213
  replicate.TargetSE = self.switches.get("TargetSE")
212
214
  self.addLFNs(replicate, lfnChunk, addPFN=True)
213
215
  request.addOperation(replicate)
@@ -1,6 +1,7 @@
1
1
  """
2
2
  Classes and functions for easier management of the InstalledComponents database
3
3
  """
4
+
4
5
  import re
5
6
  import datetime
6
7
 
@@ -90,7 +91,7 @@ class Host(componentsBase):
90
91
  __table_args__ = {"mysql_engine": "InnoDB", "mysql_charset": "utf8mb4"}
91
92
 
92
93
  hostID = Column("HostID", Integer, primary_key=True)
93
- hostName = Column("HostName", String(32), nullable=False)
94
+ hostName = Column("HostName", String(255), nullable=False)
94
95
  cpu = Column("CPU", String(64), nullable=False)
95
96
  installationList = relationship("InstalledComponent", backref="installationHost")
96
97
 
@@ -219,7 +220,7 @@ class HostLogging(componentsBase):
219
220
  __tablename__ = "HostLogging"
220
221
  __table_args__ = {"mysql_engine": "InnoDB", "mysql_charset": "utf8mb4"}
221
222
 
222
- hostName = Column("HostName", String(32), nullable=False, primary_key=True)
223
+ hostName = Column("HostName", String(255), nullable=False, primary_key=True)
223
224
  # status
224
225
  DIRAC = Column("DIRACVersion", String(64))
225
226
  Extension = Column("Extension", String(512))
@@ -11,7 +11,7 @@
11
11
  import textwrap
12
12
  from threading import Lock
13
13
 
14
- from cachetools import TTLCache, cached
14
+ from cachetools import TTLCache, cachedmethod
15
15
 
16
16
  from DIRAC import S_ERROR, S_OK, gLogger
17
17
  from DIRAC.ConfigurationSystem.Client.Helpers import Registry
@@ -25,6 +25,10 @@ from DIRAC.Resources.ProxyProvider.ProxyProviderFactory import ProxyProviderFact
25
25
 
26
26
  DEFAULT_MAIL_FROM = "proxymanager@diracgrid.org"
27
27
 
28
+ # Module-level cache for getProxyStrength method (shared across ProxyDB instances)
29
+ _get_proxy_strength_cache = TTLCache(maxsize=1000, ttl=600)
30
+ _get_proxy_strength_lock = Lock()
31
+
28
32
 
29
33
  class ProxyDB(DB):
30
34
  NOTIFICATION_TIMES = [2592000, 1296000]
@@ -398,7 +402,7 @@ class ProxyDB(DB):
398
402
  return S_ERROR(", ".join(errMsgs))
399
403
  return result
400
404
 
401
- @cached(TTLCache(maxsize=1000, ttl=600), lock=Lock())
405
+ @cachedmethod(lambda self: _get_proxy_strength_cache, lock=lambda self: _get_proxy_strength_lock)
402
406
  def getProxyStrength(self, userDN, userGroup=None, vomsAttr=None):
403
407
  """Load the proxy in cache corresponding to the criteria, and check its strength
404
408
 
@@ -601,13 +605,13 @@ class ProxyDB(DB):
601
605
  :return: S_OK(dict)/S_ERROR() -- dict contain attribute and VOMS VO
602
606
  """
603
607
  if requiredVOMSAttribute:
604
- return S_OK({"attribute": requiredVOMSAttribute, "VOMSVO": Registry.getVOMSVOForGroup(userGroup)})
608
+ return S_OK({"attribute": requiredVOMSAttribute, "VO": Registry.getVOForGroup(userGroup)})
605
609
 
606
610
  csVOMSMapping = Registry.getVOMSAttributeForGroup(userGroup)
607
611
  if not csVOMSMapping:
608
612
  return S_ERROR(f"No mapping defined for group {userGroup} in the CS")
609
613
 
610
- return S_OK({"attribute": csVOMSMapping, "VOMSVO": Registry.getVOMSVOForGroup(userGroup)})
614
+ return S_OK({"attribute": csVOMSMapping, "VO": Registry.getVOForGroup(userGroup)})
611
615
 
612
616
  def getVOMSProxy(self, userDN, userGroup, requiredLifeTime=None, requestedVOMSAttr=None):
613
617
  """Get proxy string from the Proxy Repository for use with userDN
@@ -624,7 +628,7 @@ class ProxyDB(DB):
624
628
  if not retVal["OK"]:
625
629
  return retVal
626
630
  vomsAttr = retVal["Value"]["attribute"]
627
- vomsVO = retVal["Value"]["VOMSVO"]
631
+ vomsVO = retVal["Value"]["VO"]
628
632
 
629
633
  # Look in the cache
630
634
  retVal = self.__getPemAndTimeLeft(userDN, userGroup, vomsAttr)
@@ -1,6 +1,7 @@
1
1
  """
2
2
  Utilities for ComponentMonitoring features
3
3
  """
4
+
4
5
  import datetime
5
6
  import socket
6
7
 
@@ -10,11 +10,12 @@ DEFAULT_RT_EXPIRATION_TIME = 24 * 3600
10
10
  DEFAULT_AT_EXPIRATION_TIME = 1200
11
11
 
12
12
 
13
- def getIdProviderClient(userGroup: str, idProviderClientName: str = None):
13
+ def getIdProviderClient(userGroup: str, idProviderClientName: str = None, client_name_prefix: str = ""):
14
14
  """Get an IdProvider client
15
15
 
16
16
  :param userGroup: group name
17
17
  :param idProviderClientName: name of an identity provider in the DIRAC CS
18
+ :param client_name_prefix: prefix of the client in the CS options
18
19
  """
19
20
  # Get IdProvider credentials from CS
20
21
  if not idProviderClientName and userGroup:
@@ -23,7 +24,7 @@ def getIdProviderClient(userGroup: str, idProviderClientName: str = None):
23
24
  return S_ERROR(f"The {userGroup} group belongs to the VO that is not tied to any Identity Provider.")
24
25
 
25
26
  # Prepare the client instance of the appropriate IdP
26
- return IdProviderFactory().getIdProvider(idProviderClientName)
27
+ return IdProviderFactory().getIdProvider(idProviderClientName, client_name_prefix=client_name_prefix)
27
28
 
28
29
 
29
30
  def getCachedKey(
@@ -1,20 +1,25 @@
1
- import requests
2
-
3
- from cachetools import TTLCache, LRUCache, cached
4
- from cachetools.keys import hashkey
1
+ import os
2
+ import re
3
+ import subprocess
4
+ from collections.abc import Generator
5
+ from contextlib import contextmanager
5
6
  from pathlib import Path
6
7
  from tempfile import NamedTemporaryFile
8
+ import tempfile
7
9
  from typing import Any
8
- from collections.abc import Generator
9
- from DIRAC import gConfig
10
- from DIRAC.ConfigurationSystem.Client.Helpers import Registry
11
- from contextlib import contextmanager
12
10
 
11
+ import requests
12
+ from cachetools import LRUCache, TTLCache, cached
13
+ from cachetools.keys import hashkey
14
+ from diracx.cli.internal.legacy import _apply_fixes
15
+ from diracx.core.config.schema import Config as DiracxConfig
16
+ from diracx.core.models import TokenResponse
13
17
  from diracx.core.preferences import DiracxPreferences
14
-
15
18
  from diracx.core.utils import write_credentials
19
+ from pydantic import ValidationError
16
20
 
17
- from diracx.core.models import TokenResponse
21
+ from DIRAC import S_ERROR, S_OK, gConfig
22
+ from DIRAC.ConfigurationSystem.Client.Helpers import Registry
18
23
 
19
24
  try:
20
25
  from diracx.client.sync import SyncDiracClient
@@ -104,3 +109,29 @@ def TheImpersonator(credDict: dict[str, Any], *, source: str = "") -> Generator[
104
109
  client.__enter__()
105
110
  diracx_client_cache[token_location] = client
106
111
  yield client
112
+
113
+
114
+ def diracxVerifyConfig(cfgData):
115
+ """Verify CS config using DiracX config validation
116
+
117
+ Args:
118
+ cfgData: CFG data
119
+
120
+ Returns:
121
+ S_OK | S_ERROR: Value: diracx Config validation
122
+ """
123
+ os.environ["DIRAC_COMPAT_ENABLE_CS_CONVERSION"] = "true"
124
+ with tempfile.NamedTemporaryFile() as temp_cfg:
125
+ with tempfile.NamedTemporaryFile() as temp_diracx_cfg:
126
+ cfgData.writeToFile(temp_cfg.name)
127
+ cmd = ["dirac", "internal", "legacy", "cs-sync", temp_cfg.name, temp_diracx_cfg.name]
128
+ res = subprocess.run(cmd, capture_output=True, text=True, timeout=15)
129
+ os.environ.pop("DIRAC_COMPAT_ENABLE_CS_CONVERSION")
130
+ if res.returncode == 0:
131
+ return S_OK(res.stdout)
132
+ else:
133
+ err = res.stderr.strip()
134
+ match = re.search(r"(ValidationError:.*)", err, flags=re.DOTALL)
135
+ if match:
136
+ return S_ERROR(match.group(1))
137
+ return S_ERROR(err)
@@ -38,8 +38,8 @@ Script.disableCS()
38
38
  from DIRAC.ConfigurationSystem.Client.Helpers.Registry import (
39
39
  findDefaultGroupForDN,
40
40
  getGroupOption,
41
+ getVOForGroup,
41
42
  getVOMSAttributeForGroup,
42
- getVOMSVOForGroup,
43
43
  )
44
44
  from DIRAC.FrameworkSystem.Client.ProxyManagerClient import gProxyManager
45
45
  from DIRAC.FrameworkSystem.private.authorization.utils.Tokens import (
@@ -285,7 +285,7 @@ class Params:
285
285
  if not (vomsAttr := getVOMSAttributeForGroup(self.group)):
286
286
  print(HTML(f"<yellow>No VOMS attribute foud for {self.group}</yellow>"))
287
287
  else:
288
- vo = getVOMSVOForGroup(self.group)
288
+ vo = getVOForGroup(self.group)
289
289
  if not (result := VOMS().setVOMSAttributes(self.outputFile, attribute=vomsAttr, vo=vo))["OK"]:
290
290
  return S_ERROR(f"Failed adding VOMS attribute: {result['Message']}")
291
291
  chain = result["Value"]
@@ -96,7 +96,7 @@ class ProxyInit:
96
96
  )
97
97
 
98
98
  resultVomsAttributes = VOMS.VOMS().setVOMSAttributes(
99
- self.__proxyGenerated, attribute=vomsAttr, vo=Registry.getVOMSVOForGroup(self.__piParams.diracGroup)
99
+ self.__proxyGenerated, attribute=vomsAttr, vo=Registry.getVOForGroup(self.__piParams.diracGroup)
100
100
  )
101
101
  if not resultVomsAttributes["OK"]:
102
102
  return S_ERROR(
@@ -2,6 +2,7 @@
2
2
  """
3
3
  Uninstallation of a DIRAC component
4
4
  """
5
+
5
6
  import socket
6
7
 
7
8
  from DIRAC import exit as DIRACexit
@@ -1463,9 +1463,8 @@ class Dirac(API):
1463
1463
  res = JobMonitoringClient().getJobsStatus(jobIDs)
1464
1464
  if not res["OK"]:
1465
1465
  return res
1466
- js = {k: v["Status"] for k, v in res["Value"].items()}
1467
1466
  # then filter
1468
- filteredJobs.update(_filterJobStateTransition(js, filterState))
1467
+ filteredJobs.update(_filterJobStateTransition(res["Value"], filterState))
1469
1468
 
1470
1469
  return WMSClient(useCertificates=self.useCertificates).deleteJob(list(filteredJobs))
1471
1470
 
@@ -1496,9 +1495,8 @@ class Dirac(API):
1496
1495
  res = JobMonitoringClient().getJobsStatus(jobIDs)
1497
1496
  if not res["OK"]:
1498
1497
  return res
1499
- js = {k: v["Status"] for k, v in res["Value"].items()}
1500
1498
  # then filter
1501
- jobIDsToReschedule = _filterJobStateTransition(js, JobStatus.RESCHEDULED)
1499
+ jobIDsToReschedule = _filterJobStateTransition(res["Value"], JobStatus.RESCHEDULED)
1502
1500
 
1503
1501
  return WMSClient(useCertificates=self.useCertificates).rescheduleJob(jobIDsToReschedule)
1504
1502
 
@@ -1528,9 +1526,8 @@ class Dirac(API):
1528
1526
  res = JobMonitoringClient().getJobsStatus(jobIDs)
1529
1527
  if not res["OK"]:
1530
1528
  return res
1531
- js = {k: v["Status"] for k, v in res["Value"].items()}
1532
1529
  # then filter
1533
- filteredJobs.update(_filterJobStateTransition(js, filterState))
1530
+ filteredJobs.update(_filterJobStateTransition(res["Value"], filterState))
1534
1531
 
1535
1532
  return WMSClient(useCertificates=self.useCertificates).killJob(list(filteredJobs))
1536
1533