diracx-db 0.0.1a21__py3-none-any.whl → 0.0.6__py3-none-any.whl
This diff shows the content of publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of diracx-db has been flagged as potentially problematic; see the registry page for details.
- diracx/db/__main__.py +1 -1
- diracx/db/exceptions.py +4 -1
- diracx/db/os/job_parameters.py +25 -7
- diracx/db/os/utils.py +18 -11
- diracx/db/sql/auth/db.py +113 -78
- diracx/db/sql/auth/schema.py +32 -24
- diracx/db/sql/dummy/db.py +5 -17
- diracx/db/sql/dummy/schema.py +8 -6
- diracx/db/sql/job/db.py +155 -205
- diracx/db/sql/job/schema.py +115 -59
- diracx/db/sql/job_logging/db.py +60 -143
- diracx/db/sql/job_logging/schema.py +54 -15
- diracx/db/sql/pilot_agents/db.py +0 -1
- diracx/db/sql/pilot_agents/schema.py +26 -23
- diracx/db/sql/sandbox_metadata/db.py +164 -57
- diracx/db/sql/sandbox_metadata/schema.py +9 -4
- diracx/db/sql/task_queue/db.py +44 -125
- diracx/db/sql/task_queue/schema.py +2 -0
- diracx/db/sql/utils/__init__.py +29 -451
- diracx/db/sql/utils/base.py +461 -0
- diracx/db/sql/utils/functions.py +142 -0
- diracx/db/sql/utils/types.py +137 -0
- {diracx_db-0.0.1a21.dist-info → diracx_db-0.0.6.dist-info}/METADATA +8 -6
- diracx_db-0.0.6.dist-info/RECORD +37 -0
- {diracx_db-0.0.1a21.dist-info → diracx_db-0.0.6.dist-info}/WHEEL +1 -2
- {diracx_db-0.0.1a21.dist-info → diracx_db-0.0.6.dist-info}/entry_points.txt +2 -2
- diracx/db/sql/utils/job.py +0 -574
- diracx_db-0.0.1a21.dist-info/RECORD +0 -36
- diracx_db-0.0.1a21.dist-info/top_level.txt +0 -1
diracx/db/sql/job/db.py
CHANGED

```diff
@@ -1,22 +1,23 @@
 from __future__ import annotations
 
+__all__ = ["JobDB"]
+
 from datetime import datetime, timezone
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Iterable
 
-from sqlalchemy import bindparam,
-from sqlalchemy.exc import IntegrityError, NoResultFound
+from sqlalchemy import bindparam, case, delete, literal, select, update
 
 if TYPE_CHECKING:
     from sqlalchemy.sql.elements import BindParameter
-    from
-from diracx.core.models import (
-    LimitedJobStatusReturn,
-    SearchSpec,
-    SortSpec,
-)
+    from sqlalchemy.sql import expression
 
-from
+from diracx.core.exceptions import InvalidQueryError
+from diracx.core.models import JobCommand, SearchSpec, SortSpec
+
+from ..utils import BaseSQLDB, _get_columns
+from ..utils.functions import utcnow
 from .schema import (
+    HeartBeatLoggingInfo,
     InputData,
     JobCommands,
     JobDBBase,
@@ -25,38 +26,29 @@ from .schema import (
 )
 
 
-def _get_columns(table, parameters):
-    columns = [x for x in table.columns]
-    if parameters:
-        if unrecognised_parameters := set(parameters) - set(table.columns.keys()):
-            raise InvalidQueryError(
-                f"Unrecognised parameters requested {unrecognised_parameters}"
-            )
-        columns = [c for c in columns if c.name in parameters]
-    return columns
-
-
 class JobDB(BaseSQLDB):
     metadata = JobDBBase.metadata
 
+    # Field names which should be stored in the HeartBeatLoggingInfo table
+    heartbeat_fields = {
+        "LoadAverage",
+        "MemoryUsed",
+        "Vsize",
+        "AvailableDiskSpace",
+        "CPUConsumed",
+        "WallClockTime",
+    }
+
     # TODO: this is copied from the DIRAC JobDB
-    # but is
+    # but is overwritten in LHCbDIRAC, so we need
     # to find a way to make it dynamic
-
-
-    async def summary(self, group_by, search) -> list[dict[str, str | int]]:
-        columns = _get_columns(Jobs.__table__, group_by)
-
-        stmt = select(*columns, func.count(Jobs.JobID).label("count"))
-        stmt = apply_search_filters(Jobs.__table__.columns.__getitem__, stmt, search)
-        stmt = stmt.group_by(*columns)
+    jdl_2_db_parameters = ["JobName", "JobType", "JobGroup"]
 
-
-
-
-
-
-    ]
+    async def summary(
+        self, group_by: list[str], search: list[SearchSpec]
+    ) -> list[dict[str, str | int]]:
+        """Get a summary of the jobs."""
+        return await self._summary(table=Jobs, group_by=group_by, search=search)
 
     async def search(
         self,
@@ -68,36 +60,35 @@ class JobDB(BaseSQLDB):
         per_page: int = 100,
         page: int | None = None,
     ) -> tuple[int, list[dict[Any, Any]]]:
-
-
-
-
-
-
-
-
-
-
-
-        # Calculate total count before applying pagination
-        total_count_subquery = stmt.alias()
-        total_count_stmt = select(func.count()).select_from(total_count_subquery)
-        total = (await self.conn.execute(total_count_stmt)).scalar_one()
+        """Search for jobs in the database."""
+        return await self._search(
+            table=Jobs,
+            parameters=parameters,
+            search=search,
+            sorts=sorts,
+            distinct=distinct,
+            per_page=per_page,
+            page=page,
+        )
 
-
-
-
-
-
-
-
+    async def create_job(self, compressed_original_jdl: str):
+        """Used to insert a new job with original JDL. Returns inserted job id."""
+        result = await self.conn.execute(
+            JobJDLs.__table__.insert().values(
+                JDL="",
+                JobRequirements="",
+                OriginalJDL=compressed_original_jdl,
+            )
+        )
+        return result.lastrowid
 
-
-
-
-
+    async def delete_jobs(self, job_ids: list[int]):
+        """Delete jobs from the database."""
+        stmt = delete(JobJDLs).where(JobJDLs.job_id.in_(job_ids))
+        await self.conn.execute(stmt)
 
     async def insert_input_data(self, lfns: dict[int, list[str]]):
+        """Insert input data for jobs."""
         await self.conn.execute(
             InputData.__table__.insert(),
             [
@@ -110,27 +101,8 @@ class JobDB(BaseSQLDB):
             ],
         )
 
-    async def setJobAttributes(self, job_id, jobData):
-        """TODO: add myDate and force parameters."""
-        if "Status" in jobData:
-            jobData = jobData | {"LastUpdateTime": datetime.now(tz=timezone.utc)}
-        stmt = update(Jobs).where(Jobs.JobID == job_id).values(jobData)
-        await self.conn.execute(stmt)
-
-    async def create_job(self, original_jdl):
-        """Used to insert a new job with original JDL. Returns inserted job id."""
-        from DIRAC.WorkloadManagementSystem.DB.JobDBUtils import compressJDL
-
-        result = await self.conn.execute(
-            JobJDLs.__table__.insert().values(
-                JDL="",
-                JobRequirements="",
-                OriginalJDL=compressJDL(original_jdl),
-            )
-        )
-        return result.lastrowid
-
     async def insert_job_attributes(self, jobs_to_update: dict[int, dict]):
+        """Insert the job attributes."""
         await self.conn.execute(
             Jobs.__table__.insert(),
             [
@@ -144,8 +116,6 @@ class JobDB(BaseSQLDB):
 
     async def update_job_jdls(self, jdls_to_update: dict[int, str]):
         """Used to update the JDL, typically just after inserting the original JDL, or rescheduling, for example."""
-        from DIRAC.WorkloadManagementSystem.DB.JobDBUtils import compressJDL
-
         await self.conn.execute(
             JobJDLs.__table__.update().where(
                 JobJDLs.__table__.c.JobID == bindparam("b_JobID")
@@ -153,138 +123,69 @@ class JobDB(BaseSQLDB):
             [
                 {
                     "b_JobID": job_id,
-                    "JDL":
+                    "JDL": compressed_jdl,
                 }
-                for job_id,
+                for job_id, compressed_jdl in jdls_to_update.items()
             ],
         )
 
-    async def
-
-
-        class_ad_job,
-        class_ad_req,
-        owner,
-        owner_group,
-        job_attrs,
-        vo,
-    ):
-        """Check Consistency of Submitted JDL and set some defaults
-        Prepare subJDL with Job Requirements.
-        """
-        from DIRAC.Core.Utilities.DErrno import EWMSSUBM, cmpError
-        from DIRAC.Core.Utilities.ReturnValues import returnValueOrRaise
-        from DIRAC.WorkloadManagementSystem.DB.JobDBUtils import (
-            checkAndPrepareJob,
-        )
-
-        retVal = checkAndPrepareJob(
-            jobID,
-            class_ad_job,
-            class_ad_req,
-            owner,
-            owner_group,
-            job_attrs,
-            vo,
-        )
-
-        if not retVal["OK"]:
-            if cmpError(retVal, EWMSSUBM):
-                await self.setJobAttributes(jobID, job_attrs)
+    async def set_job_attributes(self, job_data):
+        """Update the parameters of the given jobs."""
+        # TODO: add myDate and force parameters.
 
-
+        if not job_data:
+            # nothing to do!
+            raise ValueError("job_data is empty")
 
-
-
-
-        stmt = (
-            update(JobJDLs).where(JobJDLs.JobID == job_id).values(JDL=compressJDL(jdl))
-        )
-        await self.conn.execute(stmt)
-
-    async def setJobJDLsBulk(self, jdls):
-        from DIRAC.WorkloadManagementSystem.DB.JobDBUtils import compressJDL
-
-        await self.conn.execute(
-            JobJDLs.__table__.update().where(
-                JobJDLs.__table__.c.JobID == bindparam("b_JobID")
-            ),
-            [{"b_JobID": jid, "JDL": compressJDL(jdl)} for jid, jdl in jdls.items()],
-        )
-
-    async def setJobAttributesBulk(self, jobData):
-        """TODO: add myDate and force parameters."""
-        for job_id in jobData.keys():
-            if "Status" in jobData[job_id]:
-                jobData[job_id].update(
+        for job_id in job_data.keys():
+            if "Status" in job_data[job_id]:
+                job_data[job_id].update(
                     {"LastUpdateTime": datetime.now(tz=timezone.utc)}
                 )
+        columns = set(key for attrs in job_data.values() for key in attrs.keys())
+        case_expressions = {
+            column: case(
+                *[
+                    (
+                        Jobs.__table__.c.JobID == job_id,
+                        # Since the setting of the new column value is obscured by the CASE statement,
+                        # ensure that SQLAlchemy renders the new column value with the correct type
+                        literal(attrs[column], type_=Jobs.__table__.c[column].type)
+                        if not isinstance(attrs[column], expression.FunctionElement)
+                        else attrs[column],
+                    )
                    for job_id, attrs in job_data.items()
+                    if column in attrs
+                ],
+                else_=getattr(Jobs.__table__.c, column),  # Retain original value
+            )
+            for column in columns
+        }
 
-
-            Jobs.__table__.update()
-
-            )
-            [{"b_JobID": job_id, **attrs} for job_id, attrs in jobData.items()],
+        stmt = (
+            Jobs.__table__.update()
+            .values(**case_expressions)
+            .where(Jobs.__table__.c.JobID.in_(job_data.keys()))
         )
+        await self.conn.execute(stmt)
 
-    async def
-
-
-        if original:
-            stmt = select(JobJDLs.OriginalJDL).where(JobJDLs.JobID == job_id)
-        else:
-            stmt = select(JobJDLs.JDL).where(JobJDLs.JobID == job_id)
-
-        jdl = (await self.conn.execute(stmt)).scalar_one()
-        if jdl:
-            jdl = extractJDL(jdl)
-
-        return jdl
-
-    async def getJobJDLs(self, job_ids, original: bool = False) -> dict[int | str, str]:
-        from DIRAC.WorkloadManagementSystem.DB.JobDBUtils import extractJDL
-
+    async def get_job_jdls(self, job_ids, original: bool = False) -> dict[int, str]:
+        """Get the JDLs for the given jobs."""
         if original:
-            stmt = select(JobJDLs.
-                JobJDLs.
+            stmt = select(JobJDLs.job_id, JobJDLs.original_jdl).where(
+                JobJDLs.job_id.in_(job_ids)
             )
         else:
-            stmt = select(JobJDLs.
-
-        return {
-            jobid: extractJDL(jdl)
-            for jobid, jdl in (await self.conn.execute(stmt))
-            if jdl
-        }
-
-    async def get_job_status(self, job_id: int) -> LimitedJobStatusReturn:
-        try:
-            stmt = select(Jobs.Status, Jobs.MinorStatus, Jobs.ApplicationStatus).where(
-                Jobs.JobID == job_id
+            stmt = select(JobJDLs.job_id, JobJDLs.jdl).where(
+                JobJDLs.job_id.in_(job_ids)
             )
-            return LimitedJobStatusReturn(
-                **dict((await self.conn.execute(stmt)).one()._mapping)
-            )
-        except NoResultFound as e:
-            raise JobNotFound(job_id) from e
 
-
-        """Store a command to be passed to the job together with the next heart beat."""
-        try:
-            stmt = insert(JobCommands).values(
-                JobID=job_id,
-                Command=command,
-                Arguments=arguments,
-                ReceptionTime=datetime.now(tz=timezone.utc),
-            )
-            await self.conn.execute(stmt)
-        except IntegrityError as e:
-            raise JobNotFound(job_id) from e
+        return {jobid: jdl for jobid, jdl in (await self.conn.execute(stmt)) if jdl}
 
-    async def
+    async def set_job_commands(self, commands: list[tuple[int, str, str]]) -> None:
         """Store a command to be passed to the job together with the next heart beat."""
-        self.conn.execute(
-            insert(
+        await self.conn.execute(
+            JobCommands.__table__.insert(),
             [
                 {
                     "JobID": job_id,
@@ -295,12 +196,6 @@ class JobDB(BaseSQLDB):
                 for job_id, command, arguments in commands
             ],
         )
-        # FIXME handle IntegrityError
-
-    async def delete_jobs(self, job_ids: list[int]):
-        """Delete jobs from the database."""
-        stmt = delete(JobJDLs).where(JobJDLs.JobID.in_(job_ids))
-        await self.conn.execute(stmt)
 
     async def set_properties(
         self, properties: dict[int, dict[str, Any]], update_timestamp: bool = False
@@ -332,7 +227,62 @@ class JobDB(BaseSQLDB):
         if update_timestamp:
             values["LastUpdateTime"] = datetime.now(tz=timezone.utc)
 
-        stmt = update(Jobs).where(Jobs.
+        stmt = update(Jobs).where(Jobs.job_id == bindparam("job_id")).values(**values)
         rows = await self.conn.execute(stmt, update_parameters)
 
         return rows.rowcount
+
+    async def add_heartbeat_data(
+        self, job_id: int, dynamic_data: dict[str, str]
+    ) -> None:
+        """Add the job's heartbeat data to the database.
+
+        NOTE: This does not update the HeartBeatTime column in the Jobs table.
+        This is instead handled by the `diracx.logic.jobs.status.set_job_statuses`
+        as it involves updating multiple databases.
+
+        :param job_id: the job id
+        :param dynamic_data: mapping of the dynamic data to store,
+            e.g. {"AvailableDiskSpace": 123}
+        """
+        if extra_fields := set(dynamic_data) - self.heartbeat_fields:
+            raise InvalidQueryError(
+                f"Not allowed to store heartbeat data for: {extra_fields}. "
+                f"Allowed keys are: {self.heartbeat_fields}"
+            )
+        values = [
+            {
+                "JobID": job_id,
+                "Name": key,
+                "Value": value,
+                "HeartBeatTime": utcnow(),
+            }
+            for key, value in dynamic_data.items()
+        ]
+        await self.conn.execute(HeartBeatLoggingInfo.__table__.insert().values(values))
+
+    async def get_job_commands(self, job_ids: Iterable[int]) -> list[JobCommand]:
+        """Get a command to be passed to the job together with the next heartbeat.
+
+        :param job_ids: the job ids
+        :return: mapping of job id to list of commands
+        """
+        # Get the commands
+        stmt = (
+            select(JobCommands.job_id, JobCommands.command, JobCommands.arguments)
+            .where(JobCommands.job_id.in_(job_ids), JobCommands.status == "Received")
+            .order_by(JobCommands.job_id)
+        )
+        commands = await self.conn.execute(stmt)
+        # Update the status of the commands
+        stmt = (
+            update(JobCommands)
+            .where(JobCommands.job_id.in_(job_ids))
+            .values(Status="Sent")
+        )
+        await self.conn.execute(stmt)
+        # Return the commands grouped by job id
+        return [
+            JobCommand(job_id=cmd.JobID, command=cmd.Command, arguments=cmd.Arguments)
+            for cmd in commands
+        ]
```
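
The rewritten `set_job_attributes` above collapses what used to be row-by-row updates into a single UPDATE built from one CASE expression per column. Below is a minimal, self-contained sketch of that pattern; the table shape, sample data, and in-memory SQLite engine are illustrative, not taken from diracx:

```python
# Standalone sketch of the CASE-based bulk UPDATE pattern used by
# set_job_attributes. Table, data, and engine are illustrative.
from sqlalchemy import (
    Column, Integer, MetaData, String, Table,
    case, create_engine, insert, literal, select, update,
)

metadata = MetaData()
jobs = Table(
    "Jobs",
    metadata,
    Column("JobID", Integer, primary_key=True),
    Column("Status", String(32)),
    Column("MinorStatus", String(128)),
)

# Per-job attribute updates, in the shape set_job_attributes receives them.
job_data = {
    1: {"Status": "Running"},
    2: {"Status": "Done", "MinorStatus": "Execution Complete"},
}

# One CASE expression per column: use the new value when the JobID matches,
# otherwise fall back to the currently stored value.
columns = {key for attrs in job_data.values() for key in attrs}
case_expressions = {
    column: case(
        *[
            # Type the literal explicitly: inside a CASE, SQLAlchemy cannot
            # infer the target column's type from the assignment.
            (jobs.c.JobID == job_id, literal(attrs[column], type_=jobs.c[column].type))
            for job_id, attrs in job_data.items()
            if column in attrs
        ],
        else_=jobs.c[column],  # retain the original value
    )
    for column in columns
}
stmt = update(jobs).values(**case_expressions).where(jobs.c.JobID.in_(list(job_data)))

engine = create_engine("sqlite://")
metadata.create_all(engine)
with engine.begin() as conn:
    conn.execute(
        insert(jobs),
        [
            {"JobID": 1, "Status": "Received", "MinorStatus": "Unknown"},
            {"JobID": 2, "Status": "Received", "MinorStatus": "Unknown"},
        ],
    )
    conn.execute(stmt)
    print(conn.execute(select(jobs)).all())
    # expected: [(1, 'Running', 'Unknown'), (2, 'Done', 'Execution Complete')]
```

The explicit `literal(..., type_=...)` mirrors the comment in the diff: the CASE wrapper hides the target column from SQLAlchemy's type inference, so the bind parameter is typed by hand.
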
diracx/db/sql/job/schema.py
CHANGED

```diff
@@ -1,6 +1,7 @@
+from __future__ import annotations
+
+import sqlalchemy.types as types
 from sqlalchemy import (
-    DateTime,
-    Enum,
     ForeignKey,
     Index,
     Integer,
@@ -9,44 +10,88 @@ from sqlalchemy import (
 )
 from sqlalchemy.orm import declarative_base
 
+from diracx.db.sql.utils.types import SmarterDateTime
+
 from ..utils import Column, EnumBackedBool, NullColumn
 
 JobDBBase = declarative_base()
 
 
+class AccountedFlagEnum(types.TypeDecorator):
+    """Maps a ``AccountedFlagEnum()`` column to True/False in Python."""
+
+    impl = types.Enum("True", "False", "Failed", name="accounted_flag_enum")
+    cache_ok = True
+
+    def process_bind_param(self, value, dialect) -> str:
+        if value is True:
+            return "True"
+        elif value is False:
+            return "False"
+        elif value == "Failed":
+            return "Failed"
+        else:
+            raise NotImplementedError(value, dialect)
+
+    def process_result_value(self, value, dialect) -> bool | str:
+        if value == "True":
+            return True
+        elif value == "False":
+            return False
+        elif value == "Failed":
+            return "Failed"
+        else:
+            raise NotImplementedError(f"Unknown {value=}")
+
+
 class Jobs(JobDBBase):
     __tablename__ = "Jobs"
 
-
+    job_id = Column(
         "JobID",
         Integer,
         ForeignKey("JobJDLs.JobID", ondelete="CASCADE"),
         primary_key=True,
        default=0,
     )
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-    VerifiedFlag = Column("VerifiedFlag", EnumBackedBool(), default=False)
-    # TODO: Should this be True/False/"Failed"? Or True/False/Null?
-    AccountedFlag = Column(
-        "AccountedFlag", Enum("True", "False", "Failed"), default="False"
+    job_type = Column("JobType", String(32), default="user")
+    job_group = Column("JobGroup", String(32), default="00000000")
+    site = Column("Site", String(100), default="ANY")
+    job_name = Column("JobName", String(128), default="Unknown")
+    owner = Column("Owner", String(64), default="Unknown")
+    owner_group = Column("OwnerGroup", String(128), default="Unknown")
+    vo = Column("VO", String(32))
+    submission_time = NullColumn(
+        "SubmissionTime",
+        SmarterDateTime(),
+    )
+    reschedule_time = NullColumn(
+        "RescheduleTime",
+        SmarterDateTime(),
+    )
+    last_update_time = NullColumn(
+        "LastUpdateTime",
+        SmarterDateTime(),
     )
+    start_exec_time = NullColumn(
+        "StartExecTime",
+        SmarterDateTime(),
+    )
+    heart_beat_time = NullColumn(
+        "HeartBeatTime",
+        SmarterDateTime(),
+    )
+    end_exec_time = NullColumn(
+        "EndExecTime",
+        SmarterDateTime(),
+    )
+    status = Column("Status", String(32), default="Received")
+    minor_status = Column("MinorStatus", String(128), default="Unknown")
+    application_status = Column("ApplicationStatus", String(255), default="Unknown")
+    user_priority = Column("UserPriority", Integer, default=0)
+    reschedule_counter = Column("RescheduleCounter", Integer, default=0)
+    verified_flag = Column("VerifiedFlag", EnumBackedBool(), default=False)
+    accounted_flag = Column("AccountedFlag", AccountedFlagEnum(), default=False)
 
     __table_args__ = (
         Index("JobType", "JobType"),
@@ -64,66 +109,77 @@ class Jobs(JobDBBase):
 
 class JobJDLs(JobDBBase):
     __tablename__ = "JobJDLs"
-
-
-
-
+    job_id = Column("JobID", Integer, autoincrement=True, primary_key=True)
+    jdl = Column("JDL", Text)
+    job_requirements = Column("JobRequirements", Text)
+    original_jdl = Column("OriginalJDL", Text)
 
 
 class InputData(JobDBBase):
     __tablename__ = "InputData"
-
-        Integer, ForeignKey("Jobs.JobID", ondelete="CASCADE"), primary_key=True
+    job_id = Column(
+        "JobID", Integer, ForeignKey("Jobs.JobID", ondelete="CASCADE"), primary_key=True
     )
-
-
+    lfn = Column("LFN", String(255), default="", primary_key=True)
+    status = Column("Status", String(32), default="AprioriGood")
 
 
 class JobParameters(JobDBBase):
     __tablename__ = "JobParameters"
-
-        Integer, ForeignKey("Jobs.JobID", ondelete="CASCADE"), primary_key=True
+    job_id = Column(
+        "JobID", Integer, ForeignKey("Jobs.JobID", ondelete="CASCADE"), primary_key=True
     )
-
-
+    name = Column("Name", String(100), primary_key=True)
+    value = Column("Value", Text)
 
 
 class OptimizerParameters(JobDBBase):
     __tablename__ = "OptimizerParameters"
-
-        Integer, ForeignKey("Jobs.JobID", ondelete="CASCADE"), primary_key=True
+    job_id = Column(
+        "JobID", Integer, ForeignKey("Jobs.JobID", ondelete="CASCADE"), primary_key=True
     )
-
-
+    name = Column("Name", String(100), primary_key=True)
+    value = Column("Value", Text)
 
 
 class AtticJobParameters(JobDBBase):
     __tablename__ = "AtticJobParameters"
-
-        Integer, ForeignKey("Jobs.JobID", ondelete="CASCADE"), primary_key=True
+    job_id = Column(
+        "JobID", Integer, ForeignKey("Jobs.JobID", ondelete="CASCADE"), primary_key=True
     )
-
-
-
+    name = Column("Name", String(100), primary_key=True)
+    value = Column("Value", Text)
+    reschedule_cycle = Column("RescheduleCycle", Integer)
 
 
 class HeartBeatLoggingInfo(JobDBBase):
     __tablename__ = "HeartBeatLoggingInfo"
-
-        Integer, ForeignKey("Jobs.JobID", ondelete="CASCADE"), primary_key=True
+    job_id = Column(
+        "JobID", Integer, ForeignKey("Jobs.JobID", ondelete="CASCADE"), primary_key=True
+    )
+    name = Column("Name", String(100), primary_key=True)
+    value = Column("Value", Text)
+    heart_beat_time = Column(
+        "HeartBeatTime",
+        SmarterDateTime(),
+        primary_key=True,
     )
-    Name = Column(String(100), primary_key=True)
-    Value = Column(Text)
-    HeartBeatTime = Column(DateTime, primary_key=True)
 
 
 class JobCommands(JobDBBase):
     __tablename__ = "JobCommands"
-
-        Integer, ForeignKey("Jobs.JobID", ondelete="CASCADE"), primary_key=True
-    )
-
-
-
-
-
+    job_id = Column(
+        "JobID", Integer, ForeignKey("Jobs.JobID", ondelete="CASCADE"), primary_key=True
+    )
+    command = Column("Command", String(100))
+    arguments = Column("Arguments", String(100))
+    status = Column("Status", String(64), default="Received")
+    reception_time = Column(
+        "ReceptionTime",
+        SmarterDateTime(),
+        primary_key=True,
+    )
+    execution_time = NullColumn(
+        "ExecutionTime",
+        SmarterDateTime(),
+    )
```
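
`AccountedFlagEnum` is an instance of SQLAlchemy's `TypeDecorator` pattern: the column stores the enum strings `"True"`/`"False"`/`"Failed"`, while Python code reads and writes `True`, `False`, or `"Failed"`. A minimal sketch of the same round-trip against an in-memory SQLite database (the table and column names here are illustrative):

```python
# Standalone sketch of the TypeDecorator pattern behind AccountedFlagEnum:
# the database stores "True"/"False"/"Failed"; Python sees True/False/"Failed".
import sqlalchemy.types as types
from sqlalchemy import Column, Integer, MetaData, Table, create_engine, insert, select


class AccountedFlag(types.TypeDecorator):
    impl = types.Enum("True", "False", "Failed", name="accounted_flag")
    cache_ok = True

    def process_bind_param(self, value, dialect):
        # Python value -> stored enum string
        if value is True:
            return "True"
        if value is False:
            return "False"
        if value == "Failed":
            return "Failed"
        raise NotImplementedError(value)

    def process_result_value(self, value, dialect):
        # Stored enum string -> Python value
        return {"True": True, "False": False, "Failed": "Failed"}[value]


metadata = MetaData()
jobs = Table(
    "Jobs",
    metadata,
    Column("JobID", Integer, primary_key=True),
    Column("AccountedFlag", AccountedFlag(), default=False),
)

engine = create_engine("sqlite://")
metadata.create_all(engine)
with engine.begin() as conn:
    # Row 2 omits the flag, so the column default (False -> "False") applies.
    conn.execute(insert(jobs), [{"JobID": 1, "AccountedFlag": True}, {"JobID": 2}])
    print(conn.execute(select(jobs)).all())
    # expected: [(1, True), (2, False)]
```

Presumably `EnumBackedBool` and `SmarterDateTime` (imported from `..utils` and `diracx.db.sql.utils.types`) apply the same decorator approach to plain booleans and datetime handling, which would explain why the direct `DateTime` and `Enum` imports drop out of this schema.
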