DIRAC 9.0.0a66__py3-none-any.whl → 9.0.0a68__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (38)
  1. DIRAC/ConfigurationSystem/Client/Helpers/Resources.py +11 -43
  2. DIRAC/ConfigurationSystem/Client/Helpers/test/Test_Helpers.py +0 -16
  3. DIRAC/ConfigurationSystem/Client/VOMS2CSSynchronizer.py +1 -1
  4. DIRAC/Core/Security/IAMService.py +4 -3
  5. DIRAC/Core/Utilities/ClassAd/ClassAdLight.py +4 -290
  6. DIRAC/Core/Utilities/DErrno.py +1 -1
  7. DIRAC/Core/Utilities/JDL.py +1 -195
  8. DIRAC/Core/Utilities/List.py +1 -127
  9. DIRAC/Core/Utilities/ReturnValues.py +2 -2
  10. DIRAC/Core/Utilities/StateMachine.py +12 -178
  11. DIRAC/Core/Utilities/TimeUtilities.py +10 -253
  12. DIRAC/Core/Utilities/test/Test_JDL.py +0 -3
  13. DIRAC/DataManagementSystem/DB/FTS3DB.py +3 -0
  14. DIRAC/RequestManagementSystem/DB/test/RMSTestScenari.py +2 -0
  15. DIRAC/Resources/Catalog/RucioFileCatalogClient.py +1 -1
  16. DIRAC/Resources/Computing/test/Test_PoolComputingElement.py +2 -1
  17. DIRAC/Workflow/Modules/test/Test_Modules.py +5 -0
  18. DIRAC/WorkloadManagementSystem/Agent/test/Test_Agent_JobAgent.py +2 -0
  19. DIRAC/WorkloadManagementSystem/Agent/test/Test_Agent_PushJobAgent.py +1 -0
  20. DIRAC/WorkloadManagementSystem/Client/JobState/JobManifest.py +32 -261
  21. DIRAC/WorkloadManagementSystem/Client/JobStatus.py +8 -93
  22. DIRAC/WorkloadManagementSystem/DB/JobDBUtils.py +18 -147
  23. DIRAC/WorkloadManagementSystem/JobWrapper/JobWrapper.py +4 -2
  24. DIRAC/WorkloadManagementSystem/JobWrapper/test/Test_JobWrapper.py +21 -5
  25. DIRAC/WorkloadManagementSystem/JobWrapper/test/Test_JobWrapperTemplate.py +4 -0
  26. DIRAC/WorkloadManagementSystem/Utilities/JobModel.py +28 -199
  27. DIRAC/WorkloadManagementSystem/Utilities/JobStatusUtility.py +1 -63
  28. DIRAC/WorkloadManagementSystem/Utilities/ParametricJob.py +7 -171
  29. DIRAC/WorkloadManagementSystem/Utilities/test/Test_JobModel.py +1 -5
  30. DIRAC/WorkloadManagementSystem/Utilities/test/Test_ParametricJob.py +45 -128
  31. {dirac-9.0.0a66.dist-info → dirac-9.0.0a68.dist-info}/METADATA +2 -2
  32. {dirac-9.0.0a66.dist-info → dirac-9.0.0a68.dist-info}/RECORD +36 -38
  33. DIRAC/Core/Utilities/test/Test_List.py +0 -150
  34. DIRAC/Core/Utilities/test/Test_Time.py +0 -88
  35. {dirac-9.0.0a66.dist-info → dirac-9.0.0a68.dist-info}/WHEEL +0 -0
  36. {dirac-9.0.0a66.dist-info → dirac-9.0.0a68.dist-info}/entry_points.txt +0 -0
  37. {dirac-9.0.0a66.dist-info → dirac-9.0.0a68.dist-info}/licenses/LICENSE +0 -0
  38. {dirac-9.0.0a66.dist-info → dirac-9.0.0a68.dist-info}/top_level.txt +0 -0
--- a/DIRAC/WorkloadManagementSystem/JobWrapper/test/Test_JobWrapper.py
+++ b/DIRAC/WorkloadManagementSystem/JobWrapper/test/Test_JobWrapper.py
@@ -344,24 +344,40 @@ def test_processQuickExecutionNoWatchdog(mocker):
 
 
 @pytest.mark.slow
-def test_processSubprocessFailureNoPid(mocker):
-    """Test the process method of the JobWrapper class: the subprocess fails and no PID is returned."""
+@pytest.mark.parametrize("expect_failure", [True, False])
+def test_processSubprocessFailureNoPid(mocker, monkeypatch, expect_failure):
+    """Test the process method of the JobWrapper class: the subprocess fails and no PID is returned.
+
+    expect_failure is used to ensure that the JobWrapper is functioning correctly even with the other patching
+    that is applied in the test (e.g. CHILD_PID_POLL_INTERVALS).
+    """
     # Test failure in starting the payload process
     jw = JobWrapper()
     jw.jobArgs = {}
 
     mocker.patch.object(jw, "_JobWrapper__report")
     mocker.patch.object(jw, "_JobWrapper__setJobParam")
+    monkeypatch.setattr(
+        "DIRAC.WorkloadManagementSystem.JobWrapper.JobWrapper.CHILD_PID_POLL_INTERVALS", [0.1, 0.2, 0.3, 0.4, 0.5]
+    )
+
     mock_exeThread = mocker.Mock()
     mock_exeThread.start.side_effect = lambda: time.sleep(0.1)
-    mocker.patch("DIRAC.WorkloadManagementSystem.JobWrapper.JobWrapper.ExecutionThread", return_value=mock_exeThread)
+    if expect_failure:
+        mocker.patch(
+            "DIRAC.WorkloadManagementSystem.JobWrapper.JobWrapper.ExecutionThread", return_value=mock_exeThread
+        )
 
     with tempfile.NamedTemporaryFile(delete=True) as std_out, tempfile.NamedTemporaryFile(delete=True) as std_err:
         jw.outputFile = std_out.name
         jw.errorFile = std_err.name
         result = jw.process(command="mock_command", env={})
-        assert not result["OK"]
-        assert "Payload process could not start after 140 seconds" in result["Message"]
+
+        if expect_failure:
+            assert not result["OK"]
+            assert "Payload process could not start after 1.5 seconds" in result["Message"]
+        else:
+            assert result["OK"]
 
 
 # -------------------------------------------------------------------------------------------------
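
The patched CHILD_PID_POLL_INTERVALS sum to 0.1 + 0.2 + 0.3 + 0.4 + 0.5 = 1.5, which is why the expected error message changes from "140 seconds" to "1.5 seconds". A minimal sketch of the polling pattern this assertion relies on follows; the helper name and loop body are assumptions for illustration, not the actual JobWrapper code.

import time

# Assumed sketch: the wrapper polls for the payload PID once per interval,
# so the worst-case wait is sum(CHILD_PID_POLL_INTERVALS) seconds.
CHILD_PID_POLL_INTERVALS = [0.1, 0.2, 0.3, 0.4, 0.5]  # values patched in the test above


def wait_for_payload_pid(get_pid):
    """Hypothetical helper: return the payload PID, or None if it never appears."""
    for interval in CHILD_PID_POLL_INTERVALS:
        pid = get_pid()
        if pid:
            return pid
        time.sleep(interval)
    return None

# With a payload that never reports a PID, the caller gives up after
# sum(CHILD_PID_POLL_INTERVALS) == 1.5 seconds and reports the failure message asserted above.
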
--- a/DIRAC/WorkloadManagementSystem/JobWrapper/test/Test_JobWrapperTemplate.py
+++ b/DIRAC/WorkloadManagementSystem/JobWrapper/test/Test_JobWrapperTemplate.py
@@ -72,6 +72,7 @@ def extraOptions():
     os.remove(extraOptions)
 
 
+@pytest.mark.slow
 def test_createAndExecuteJobWrapperTemplate_success(extraOptions):
     """Test the creation of a classical job wrapper and its execution:
     There is an extra option cfg file to be passed to the job wrapper.
@@ -144,6 +145,7 @@ def test_createAndExecuteJobWrapperTemplate_success(extraOptions):
     shutil.rmtree(os.path.join(os.getcwd(), "job"))
 
 
+@pytest.mark.slow
 def test_createAndExecuteJobWrapperTemplate_missingExtraOptions():
     """Test the creation of a classical job wrapper and its execution:
     There is no extra options to be passed to the job wrapper.
@@ -205,6 +207,7 @@ def test_createAndExecuteJobWrapperTemplate_missingExtraOptions():
     shutil.rmtree(os.path.join(os.getcwd(), "job"))
 
 
+@pytest.mark.slow
 def test_createAndExecuteRelocatedJobWrapperTemplate_success(extraOptions):
     """Test the creation of a relocated job wrapper and its execution:
     This is generally used when containers are involved (SingularityCE).
@@ -325,6 +328,7 @@ def test_createAndExecuteRelocatedJobWrapperTemplate_success(extraOptions):
     shutil.rmtree(wrapperPath)
 
 
+@pytest.mark.slow
 def test_createAndExecuteJobWrapperOfflineTemplate_success(extraOptions):
     """Test the creation of an offline job wrapper and its execution:
     This is generally used when pre/post processing operations are executed locally,
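
All four of these template tests spawn a real job wrapper process, hence the new slow marker. A marker like this is normally registered once and then deselected for quick runs; the sketch below is the generic pytest pattern, not taken from DIRAC's own test configuration.

# conftest.py -- generic way to register a custom "slow" marker
def pytest_configure(config):
    config.addinivalue_line("markers", "slow: marks tests as slow to run")

# Quick runs can then skip the marked tests with:
#   pytest -m "not slow"
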
--- a/DIRAC/WorkloadManagementSystem/Utilities/JobModel.py
+++ b/DIRAC/WorkloadManagementSystem/Utilities/JobModel.py
@@ -1,209 +1,38 @@
-""" This module contains the JobModel class, which is used to validate the job description """
+from __future__ import annotations
 
-# pylint: disable=no-self-argument, no-self-use, invalid-name, missing-function-docstring
-
-from collections.abc import Iterable
-from typing import Any, Annotated, TypeAlias, Self
-
-from pydantic import BaseModel, BeforeValidator, model_validator, field_validator, ConfigDict
+from typing import ClassVar
+from pydantic import PrivateAttr
+from DIRACCommon.WorkloadManagementSystem.Utilities.JobModel import *  # noqa: F401, F403
 
 from DIRAC import gLogger
-from DIRAC.ConfigurationSystem.Client.Helpers.Operations import Operations
-from DIRAC.ConfigurationSystem.Client.Helpers.Resources import getDIRACPlatforms, getSites
-
-
-# HACK: Convert appropriate iterables into sets
-def default_set_validator(value):
-    if value is None:
-        return set()
-    elif not isinstance(value, Iterable):
-        return value
-    elif isinstance(value, (str, bytes, bytearray)):
-        return value
-    else:
-        return set(value)
-
-
-CoercibleSetStr: TypeAlias = Annotated[set[str], BeforeValidator(default_set_validator)]
-
-
-class BaseJobDescriptionModel(BaseModel):
-    """Base model for the job description (not parametric)"""
-
-    model_config = ConfigDict(validate_assignment=True)
-
-    arguments: str = ""
-    bannedSites: CoercibleSetStr = set()
-    # TODO: This should use a field factory
-    cpuTime: int = Operations().getValue("JobDescription/DefaultCPUTime", 86400)
-    executable: str
-    executionEnvironment: dict = None
-    gridCE: str = ""
-    inputSandbox: CoercibleSetStr = set()
-    inputData: CoercibleSetStr = set()
-    inputDataPolicy: str = ""
-    jobConfigArgs: str = ""
-    jobGroup: str = ""
-    jobType: str = "User"
-    jobName: str = "Name"
-    # TODO: This should be an StrEnum
-    logLevel: str = "INFO"
-    # TODO: This can't be None with this type hint
-    maxNumberOfProcessors: int = None
-    minNumberOfProcessors: int = 1
-    outputData: CoercibleSetStr = set()
-    outputPath: str = ""
-    outputSandbox: CoercibleSetStr = set()
-    outputSE: str = ""
-    platform: str = ""
-    # TODO: This should use a field factory
-    priority: int = Operations().getValue("JobDescription/DefaultPriority", 1)
-    sites: CoercibleSetStr = set()
-    stderr: str = "std.err"
-    stdout: str = "std.out"
-    tags: CoercibleSetStr = set()
-    extraFields: dict[str, Any] = {}
-
-    @field_validator("cpuTime")
-    def checkCPUTimeBounds(cls, v):
-        minCPUTime = Operations().getValue("JobDescription/MinCPUTime", 100)
-        maxCPUTime = Operations().getValue("JobDescription/MaxCPUTime", 500000)
-        if not minCPUTime <= v <= maxCPUTime:
-            raise ValueError(f"cpuTime out of bounds (must be between {minCPUTime} and {maxCPUTime})")
-        return v
-
-    @field_validator("executable")
-    def checkExecutableIsNotAnEmptyString(cls, v: str):
-        if not v:
-            raise ValueError("executable must not be an empty string")
-        return v
-
-    @field_validator("jobType")
-    def checkJobTypeIsAllowed(cls, v: str):
-        jobTypes = Operations().getValue("JobDescription/AllowedJobTypes", ["User", "Test", "Hospital"])
-        transformationTypes = Operations().getValue("Transformations/DataProcessing", [])
-        allowedTypes = jobTypes + transformationTypes
-        if v not in allowedTypes:
-            raise ValueError(f"jobType '{v}' is not allowed for this kind of user (must be in {allowedTypes})")
-        return v
-
-    @field_validator("inputData")
-    def checkInputDataDoesntContainDoubleSlashes(cls, v):
-        if v:
-            for lfn in v:
-                if lfn.find("//") > -1:
-                    raise ValueError("Input data contains //")
-        return v
-
-    @field_validator("inputData")
-    def addLFNPrefixIfStringStartsWithASlash(cls, v: set[str]):
-        if v:
-            v = {lfn.strip() for lfn in v if lfn.strip()}
-            v = {f"LFN:{lfn}" if lfn.startswith("/") else lfn for lfn in v}
-
-            for lfn in v:
-                if not lfn.startswith("LFN:/"):
-                    raise ValueError("Input data files must start with LFN:/")
-        return v
-
-    @model_validator(mode="after")
-    def checkNumberOfInputDataFiles(self) -> Self:
-        if self.inputData:
-            maxInputDataFiles = Operations().getValue("JobDescription/MaxInputData", 500)
-            if self.jobType == "User" and len(self.inputData) >= maxInputDataFiles:
-                raise ValueError(f"inputData contains too many files (must contain at most {maxInputDataFiles})")
-        return self
-
-    @field_validator("inputSandbox")
-    def checkLFNSandboxesAreWellFormated(cls, v: set[str]):
-        for inputSandbox in v:
-            if inputSandbox.startswith("LFN:") and not inputSandbox.startswith("LFN:/"):
-                raise ValueError("LFN files must start by LFN:/")
-        return v
-
-    @field_validator("logLevel")
-    def checkLogLevelIsValid(cls, v: str):
-        v = v.upper()
-        possibleLogLevels = gLogger.getAllPossibleLevels()
-        if v not in possibleLogLevels:
-            raise ValueError(f"Log level {v} not in {possibleLogLevels}")
-        return v
-
-    @field_validator("minNumberOfProcessors")
-    def checkMinNumberOfProcessorsBounds(cls, v):
-        minNumberOfProcessors = Operations().getValue("JobDescription/MinNumberOfProcessors", 1)
-        maxNumberOfProcessors = Operations().getValue("JobDescription/MaxNumberOfProcessors", 1024)
-        if not minNumberOfProcessors <= v <= maxNumberOfProcessors:
-            raise ValueError(
-                f"minNumberOfProcessors out of bounds (must be between {minNumberOfProcessors} and {maxNumberOfProcessors})"
-            )
-        return v
-
-    @field_validator("maxNumberOfProcessors")
-    def checkMaxNumberOfProcessorsBounds(cls, v):
-        minNumberOfProcessors = Operations().getValue("JobDescription/MinNumberOfProcessors", 1)
-        maxNumberOfProcessors = Operations().getValue("JobDescription/MaxNumberOfProcessors", 1024)
-        if not minNumberOfProcessors <= v <= maxNumberOfProcessors:
-            raise ValueError(
-                f"minNumberOfProcessors out of bounds (must be between {minNumberOfProcessors} and {maxNumberOfProcessors})"
-            )
-        return v
-
-    @model_validator(mode="after")
-    def checkThatMaxNumberOfProcessorsIsGreaterThanMinNumberOfProcessors(self) -> Self:
-        if self.maxNumberOfProcessors:
-            if self.maxNumberOfProcessors < self.minNumberOfProcessors:
-                raise ValueError("maxNumberOfProcessors must be greater than minNumberOfProcessors")
-        return self
-
-    @model_validator(mode="after")
-    def addTagsDependingOnNumberOfProcessors(self) -> Self:
-        if self.minNumberOfProcessors == self.maxNumberOfProcessors:
-            self.tags.add(f"{self.minNumberOfProcessors}Processors")
-        if self.minNumberOfProcessors > 1:
-            self.tags.add("MultiProcessor")
-        return self
+from DIRAC.ConfigurationSystem.Client.Helpers.Resources import getSites
 
-    @field_validator("sites")
-    def checkSites(cls, v: set[str]):
-        if v:
-            res = getSites()
-            if not res["OK"]:
-                raise ValueError(res["Message"])
-            invalidSites = v - set(res["Value"]).union({"ANY"})
-            if invalidSites:
-                raise ValueError(f"Invalid sites: {' '.join(invalidSites)}")
-        return v
 
-    @model_validator(mode="after")
-    def checkThatSitesAndBannedSitesAreNotMutuallyExclusive(self) -> Self:
-        if self.sites and self.bannedSites:
-            while self.bannedSites:
-                self.sites.discard(self.bannedSites.pop())
-            if not self.sites:
-                raise ValueError("sites and bannedSites are mutually exclusive")
-        return self
+def _make_model_config(cls=None) -> BaseJobDescriptionModelConfg:
+    from DIRAC.ConfigurationSystem.Client.Helpers.Operations import Operations
 
-    @field_validator("priority")
-    def checkPriorityBounds(cls, v):
-        minPriority = Operations().getValue("JobDescription/MinPriority", 0)
-        maxPriority = Operations().getValue("JobDescription/MaxPriority", 10)
-        if not minPriority <= v <= maxPriority:
-            raise ValueError(f"priority out of bounds (must be between {minPriority} and {maxPriority})")
-        return v
+    ops = Operations()
+    allowedJobTypes = ops.getValue("JobDescription/AllowedJobTypes", ["User", "Test", "Hospital"])
+    allowedJobTypes += ops.getValue("Transformations/DataProcessing", [])
+    return {
+        "cpuTime": ops.getValue("JobDescription/DefaultCPUTime", 86400),
+        "priority": ops.getValue("JobDescription/DefaultPriority", 1),
+        "minCPUTime": ops.getValue("JobDescription/MinCPUTime", 100),
+        "maxCPUTime": ops.getValue("JobDescription/MaxCPUTime", 500000),
+        "allowedJobTypes": allowedJobTypes,
+        "maxInputDataFiles": ops.getValue("JobDescription/MaxInputData", 500),
+        "minNumberOfProcessors": ops.getValue("JobDescription/MinNumberOfProcessors", 1),
+        "maxNumberOfProcessors": ops.getValue("JobDescription/MaxNumberOfProcessors", 1024),
+        "minPriority": ops.getValue("JobDescription/MinPriority", 0),
+        "maxPriority": ops.getValue("JobDescription/MaxPriority", 10),
+        "possibleLogLevels": gLogger.getAllPossibleLevels(),
+        "sites": getSites(),
+    }
 
 
-class JobDescriptionModel(BaseJobDescriptionModel):
-    """Model for the job description (non parametric job with user credentials, i.e server side)"""
+class BaseJobDescriptionModel(BaseJobDescriptionModel):  # noqa: F405 pylint: disable=function-redefined
+    _config_builder: ClassVar = _make_model_config
 
-    owner: str
-    ownerGroup: str
-    vo: str
 
-    @model_validator(mode="after")
-    def checkLFNMatchesREGEX(self) -> Self:
-        if self.inputData:
-            for lfn in self.inputData:
-                if not lfn.startswith(f"LFN:/{self.vo}/"):
-                    raise ValueError(f"Input data not correctly specified (must start with LFN:/{self.vo}/)")
-        return self
+class JobDescriptionModel(JobDescriptionModel):  # noqa: F405 pylint: disable=function-redefined
+    _config_builder: ClassVar = _make_model_config
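
The module is now a thin shim: the models are re-exported from DIRACCommon, and the two DIRAC-side subclasses only attach a _config_builder that resolves validation limits from the Configuration System at validation time, instead of hard-coding Operations() calls in field defaults and validators as the removed code did. A rough illustration of that pattern follows; the base-class internals and config keys below are assumptions for illustration, not DIRACCommon's actual implementation.

from typing import ClassVar

from pydantic import BaseModel, field_validator


def _static_config() -> dict:
    # Stand-in for _make_model_config(): a subclass can point _config_builder
    # at any callable that returns the limits to validate against.
    return {"minCPUTime": 100, "maxCPUTime": 500000}


class SketchBaseJobModel(BaseModel):
    # Subclasses (e.g. the DIRAC shim in the hunk above) override this to read
    # limits from the Configuration System instead of hard-coded defaults.
    _config_builder: ClassVar = staticmethod(_static_config)

    executable: str
    cpuTime: int = 86400

    @field_validator("cpuTime")
    @classmethod
    def checkCPUTimeBounds(cls, v: int) -> int:
        cfg = cls._config_builder()  # limits come from the pluggable builder
        if not cfg["minCPUTime"] <= v <= cfg["maxCPUTime"]:
            raise ValueError("cpuTime out of bounds")
        return v

# SketchBaseJobModel(executable="job.sh", cpuTime=50) -> ValidationError (below minCPUTime)
# SketchBaseJobModel(executable="job.sh")             -> accepted with the default cpuTime
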
--- a/DIRAC/WorkloadManagementSystem/Utilities/JobStatusUtility.py
+++ b/DIRAC/WorkloadManagementSystem/Utilities/JobStatusUtility.py
@@ -9,6 +9,7 @@ from DIRAC import S_ERROR, S_OK, gLogger
 from DIRAC.Core.Utilities import TimeUtilities
 from DIRAC.Core.Utilities.ObjectLoader import ObjectLoader
 from DIRAC.WorkloadManagementSystem.Client import JobStatus
+from DIRACCommon.WorkloadManagementSystem.Utilities.JobStatusUtility import getStartAndEndTime, getNewStatus
 
 if TYPE_CHECKING:
     from DIRAC.WorkloadManagementSystem.DB.JobLoggingDB import JobLoggingDB
@@ -180,66 +181,3 @@ class JobStatusUtility:
         return result
 
         return S_OK((attrNames, attrValues))
-
-
-def getStartAndEndTime(startTime, endTime, updateTimes, timeStamps, statusDict):
-    newStat = ""
-    firstUpdate = TimeUtilities.toEpoch(TimeUtilities.fromString(updateTimes[0]))
-    for ts, st in timeStamps:
-        if firstUpdate >= ts:
-            newStat = st
-    # Pick up start and end times from all updates
-    for updTime in updateTimes:
-        sDict = statusDict[updTime]
-        newStat = sDict.get("Status", newStat)
-
-        if not startTime and newStat == JobStatus.RUNNING:
-            # Pick up the start date when the job starts running if not existing
-            startTime = updTime
-        elif not endTime and newStat in JobStatus.JOB_FINAL_STATES:
-            # Pick up the end time when the job is in a final status
-            endTime = updTime
-
-    return startTime, endTime
-
-
-def getNewStatus(
-    jobID: int,
-    updateTimes: list[datetime],
-    lastTime: datetime,
-    statusDict: dict[datetime, Any],
-    currentStatus,
-    force: bool,
-    log,
-):
-    status = ""
-    minor = ""
-    application = ""
-    # Get the last status values looping on the most recent upupdateTimes in chronological order
-    for updTime in [dt for dt in updateTimes if dt >= lastTime]:
-        sDict = statusDict[updTime]
-        log.debug(f"\tTime {updTime} - Statuses {str(sDict)}")
-        status = sDict.get("Status", currentStatus)
-        # evaluate the state machine if the status is changing
-        if not force and status != currentStatus:
-            res = JobStatus.JobsStateMachine(currentStatus).getNextState(status)
-            if not res["OK"]:
-                return res
-            newStat = res["Value"]
-            # If the JobsStateMachine does not accept the candidate, don't update
-            if newStat != status:
-                # keeping the same status
-                log.error(
-                    f"Job Status Error: {jobID} can't move from {currentStatus} to {status}: using {newStat}",
-                )
-                status = newStat
-                sDict["Status"] = newStat
-                # Change the source to indicate this is not what was requested
-                source = sDict.get("Source", "")
-                sDict["Source"] = source + "(SM)"
-            # at this stage status == newStat. Set currentStatus to this new status
-            currentStatus = newStat
-
-        minor = sDict.get("MinorStatus", minor)
-        application = sDict.get("ApplicationStatus", application)
-    return S_OK((status, minor, application))
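
Both helpers keep the signatures shown in the removed code; only their import path moves to DIRACCommon, as the new import in the first hunk of this file shows. A call-shape sketch with made-up status updates follows; the sample values and outcome are for illustration only.

from datetime import datetime, timedelta

from DIRAC import gLogger
from DIRACCommon.WorkloadManagementSystem.Utilities.JobStatusUtility import getNewStatus

now = datetime.utcnow()
updateTimes = [now, now + timedelta(seconds=30)]
statusDict = {  # made-up status updates keyed by their timestamps
    updateTimes[0]: {"Status": "Running", "MinorStatus": "Application", "Source": "JobWrapper"},
    updateTimes[1]: {"Status": "Done", "MinorStatus": "Execution Complete", "Source": "JobWrapper"},
}

res = getNewStatus(
    jobID=123,
    updateTimes=updateTimes,
    lastTime=updateTimes[0],
    statusDict=statusDict,
    currentStatus="Running",
    force=False,
    log=gLogger,  # any logger exposing debug()/error()
)
# res is an S_OK/S_ERROR structure; on success res["Value"] is
# (status, minorStatus, applicationStatus) after the state machine check.
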
--- a/DIRAC/WorkloadManagementSystem/Utilities/ParametricJob.py
+++ b/DIRAC/WorkloadManagementSystem/Utilities/ParametricJob.py
@@ -4,176 +4,12 @@
 getParameterVectorLength() - to get the total size of the bunch of parametric jobs
 generateParametricJobs() - to get a list of expanded descriptions of all the jobs
 """
-import re
 
-from DIRAC.Core.Utilities.ClassAd.ClassAdLight import ClassAd
-from DIRAC.Core.Utilities.ReturnValues import S_OK, S_ERROR
-from DIRAC.Core.Utilities.DErrno import EWMSJDL
+# Import from DIRACCommon for backward compatibility
+from DIRACCommon.WorkloadManagementSystem.Utilities.ParametricJob import (
+    getParameterVectorLength,
+    generateParametricJobs,
+)
 
-
-def __getParameterSequence(nPar, parList=[], parStart=1, parStep=0, parFactor=1):
-    if parList:
-        if nPar != len(parList):
-            return []
-        else:
-            parameterList = list(parList)
-    else:
-        # The first parameter must have the same type as the other ones even if not defined explicitly
-        parameterList = [parStart * type(parFactor)(1) + type(parStep)(0)]
-        for np in range(1, nPar):
-            parameterList.append(parameterList[np - 1] * parFactor + parStep)
-
-    return parameterList
-
-
-def getParameterVectorLength(jobClassAd):
-    """Get the length of parameter vector in the parametric job description
-
-    :param jobClassAd: ClassAd job description object
-    :return: result structure with the Value: int number of parameter values, None if not a parametric job
-    """
-
-    nParValues = None
-    attributes = jobClassAd.getAttributes()
-    for attribute in attributes:
-        if attribute.startswith("Parameters"):
-            if jobClassAd.isAttributeList(attribute):
-                parameterList = jobClassAd.getListFromExpression(attribute)
-                nThisParValues = len(parameterList)
-            else:
-                nThisParValues = jobClassAd.getAttributeInt(attribute)
-            if nParValues is not None and nParValues != nThisParValues:
-                return S_ERROR(
-                    EWMSJDL,
-                    "Different length of parameter vectors: for %s, %s != %d" % (attribute, nParValues, nThisParValues),
-                )
-            nParValues = nThisParValues
-    if nParValues is not None and nParValues <= 0:
-        return S_ERROR(EWMSJDL, "Illegal number of job parameters %d" % (nParValues))
-    return S_OK(nParValues)
-
-
-def __updateAttribute(classAd, attribute, parName, parValue):
-    # If there is something to do:
-    pattern = r"%%\(%s\)s" % parName
-    if parName == "0":
-        pattern = "%s"
-    expr = classAd.get_expression(attribute)
-    if not re.search(pattern, expr):
-        return False
-
-    pattern = "%%(%s)s" % parName
-    if parName == "0":
-        pattern = "%s"
-
-    parValue = parValue.strip()
-    if classAd.isAttributeList(attribute):
-        parValue = parValue.strip()
-        if parValue.startswith("{"):
-            parValue = parValue.lstrip("{").rstrip("}").strip()
-
-    expr = classAd.get_expression(attribute)
-    newexpr = expr.replace(pattern, str(parValue))
-    classAd.set_expression(attribute, newexpr)
-    return True
-
-
-def generateParametricJobs(jobClassAd):
-    """Generate a series of ClassAd job descriptions expanding
-    job parameters
-
-    :param jobClassAd: ClassAd job description object
-    :return: list of ClassAd job description objects
-    """
-    if not jobClassAd.lookupAttribute("Parameters"):
-        return S_OK([jobClassAd.asJDL()])
-
-    result = getParameterVectorLength(jobClassAd)
-    if not result["OK"]:
-        return result
-    nParValues = result["Value"]
-    if nParValues is None:
-        return S_ERROR(EWMSJDL, "Can not determine the number of job parameters")
-
-    parameterDict = {}
-    attributes = jobClassAd.getAttributes()
-    for attribute in attributes:
-        for key in ["Parameters", "ParameterStart", "ParameterStep", "ParameterFactor"]:
-            if attribute.startswith(key):
-                seqID = "0" if "." not in attribute else attribute.split(".")[1]
-                parameterDict.setdefault(seqID, {})
-                if key == "Parameters":
-                    if jobClassAd.isAttributeList(attribute):
-                        parList = jobClassAd.getListFromExpression(attribute)
-                        if len(parList) != nParValues:
-                            return S_ERROR(EWMSJDL, "Inconsistent parametric job description")
-                        parameterDict[seqID]["ParameterList"] = parList
-                    else:
-                        if attribute != "Parameters":
-                            return S_ERROR(EWMSJDL, "Inconsistent parametric job description")
-                        nPar = jobClassAd.getAttributeInt(attribute)
-                        if nPar is None:
-                            value = jobClassAd.get_expression(attribute)
-                            return S_ERROR(EWMSJDL, f"Inconsistent parametric job description: {attribute}={value}")
-                        parameterDict[seqID]["Parameters"] = nPar
-                else:
-                    value = jobClassAd.getAttributeInt(attribute)
-                    if value is None:
-                        value = jobClassAd.getAttributeFloat(attribute)
-                        if value is None:
-                            value = jobClassAd.get_expression(attribute)
-                            return S_ERROR(f"Illegal value for {attribute} JDL field: {value}")
-                    parameterDict[seqID][key] = value
-
-    if "0" in parameterDict and not parameterDict.get("0"):
-        parameterDict.pop("0")
-
-    parameterLists = {}
-    for seqID in parameterDict:
-        parList = __getParameterSequence(
-            nParValues,
-            parList=parameterDict[seqID].get("ParameterList", []),
-            parStart=parameterDict[seqID].get("ParameterStart", 1),
-            parStep=parameterDict[seqID].get("ParameterStep", 0),
-            parFactor=parameterDict[seqID].get("ParameterFactor", 1),
-        )
-        if not parList:
-            return S_ERROR(EWMSJDL, "Inconsistent parametric job description")
-
-        parameterLists[seqID] = parList
-
-    jobDescList = []
-    jobDesc = jobClassAd.asJDL()
-    # Width of the sequential parameter number
-    zLength = len(str(nParValues - 1))
-    for n in range(nParValues):
-        newJobDesc = jobDesc
-        newJobDesc = newJobDesc.replace("%n", str(n).zfill(zLength))
-        newClassAd = ClassAd(newJobDesc)
-        for seqID in parameterLists:
-            parameter = parameterLists[seqID][n]
-            for attribute in newClassAd.getAttributes():
-                __updateAttribute(newClassAd, attribute, seqID, str(parameter))
-
-        for seqID in parameterLists:
-            for attribute in ["Parameters", "ParameterStart", "ParameterStep", "ParameterFactor"]:
-                if seqID == "0":
-                    newClassAd.deleteAttribute(attribute)
-                else:
-                    newClassAd.deleteAttribute(f"{attribute}.{seqID}")
-
-            parameter = parameterLists[seqID][n]
-            if seqID == "0":
-                attribute = "Parameter"
-            else:
-                attribute = f"Parameter.{seqID}"
-            if isinstance(parameter, str) and parameter.startswith("{"):
-                newClassAd.insertAttributeInt(attribute, str(parameter))
-            else:
-                newClassAd.insertAttributeString(attribute, str(parameter))
-
-        newClassAd.insertAttributeInt("ParameterNumber", n)
-        newJDL = newClassAd.asJDL()
-        jobDescList.append(newJDL)
-
-    return S_OK(jobDescList)
+# Re-export for backward compatibility
+__all__ = ["getParameterVectorLength", "generateParametricJobs"]
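
The two helpers keep their ClassAd-based interface after the move to DIRACCommon, so existing imports keep working through this shim. A small usage sketch with a made-up parametric JDL follows; the ClassAd import path is the one used by the removed code, and the expected output is only what the removed implementation suggests.

from DIRAC.Core.Utilities.ClassAd.ClassAdLight import ClassAd
from DIRACCommon.WorkloadManagementSystem.Utilities.ParametricJob import (
    generateParametricJobs,
    getParameterVectorLength,
)

# Three jobs with parameter values 1, 2, 3 substituted into Arguments via %s
jdl = """[
    Executable = "my_script.sh";
    Arguments = "%s";
    Parameters = 3;
    ParameterStart = 1;
    ParameterStep = 1;
]"""
jobDescription = ClassAd(jdl)

res = getParameterVectorLength(jobDescription)
if res["OK"]:
    print("number of jobs:", res["Value"])  # expected: 3

res = generateParametricJobs(jobDescription)
if res["OK"]:
    for expandedJDL in res["Value"]:  # one fully expanded JDL string per job
        print(expandedJDL)
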
--- a/DIRAC/WorkloadManagementSystem/Utilities/test/Test_JobModel.py
+++ b/DIRAC/WorkloadManagementSystem/Utilities/test/Test_JobModel.py
@@ -175,11 +175,7 @@ def test_logLevelValidator_invalid():
 
 def test_platformValidator_valid():
     """Test the platform validator with valid input."""
-    with patch(
-        "DIRAC.WorkloadManagementSystem.Utilities.JobModel.getDIRACPlatforms",
-        return_value=S_OK(["x86_64-slc6-gcc62-opt"]),
-    ):
-        job = BaseJobDescriptionModel(executable=EXECUTABLE, platform="x86_64-slc6-gcc62-opt")
+    job = BaseJobDescriptionModel(executable=EXECUTABLE, platform="x86_64-slc6-gcc62-opt")
     assert job.platform == "x86_64-slc6-gcc62-opt"
 
 