diracx-db 0.0.1a19__tar.gz → 0.0.1a21__tar.gz

Files changed (55)
  1. {diracx_db-0.0.1a19 → diracx_db-0.0.1a21}/PKG-INFO +2 -2
  2. {diracx_db-0.0.1a19 → diracx_db-0.0.1a21}/pyproject.toml +1 -1
  3. diracx_db-0.0.1a21/src/diracx/db/sql/job/db.py +338 -0
  4. {diracx_db-0.0.1a19 → diracx_db-0.0.1a21}/src/diracx/db/sql/job_logging/db.py +74 -0
  5. {diracx_db-0.0.1a19 → diracx_db-0.0.1a21}/src/diracx/db/sql/utils/__init__.py +11 -3
  6. diracx_db-0.0.1a21/src/diracx/db/sql/utils/job.py +574 -0
  7. {diracx_db-0.0.1a19 → diracx_db-0.0.1a21}/src/diracx_db.egg-info/PKG-INFO +2 -2
  8. {diracx_db-0.0.1a19 → diracx_db-0.0.1a21}/src/diracx_db.egg-info/SOURCES.txt +1 -1
  9. {diracx_db-0.0.1a19 → diracx_db-0.0.1a21}/src/diracx_db.egg-info/requires.txt +1 -1
  10. {diracx_db-0.0.1a19 → diracx_db-0.0.1a21}/tests/jobs/test_jobDB.py +45 -42
  11. diracx_db-0.0.1a19/src/diracx/db/sql/job/db.py +0 -492
  12. diracx_db-0.0.1a19/src/diracx/db/sql/utils/job_status.py +0 -302
  13. {diracx_db-0.0.1a19 → diracx_db-0.0.1a21}/README.md +0 -0
  14. {diracx_db-0.0.1a19 → diracx_db-0.0.1a21}/setup.cfg +0 -0
  15. {diracx_db-0.0.1a19 → diracx_db-0.0.1a21}/src/diracx/db/__init__.py +0 -0
  16. {diracx_db-0.0.1a19 → diracx_db-0.0.1a21}/src/diracx/db/__main__.py +0 -0
  17. {diracx_db-0.0.1a19 → diracx_db-0.0.1a21}/src/diracx/db/exceptions.py +0 -0
  18. {diracx_db-0.0.1a19 → diracx_db-0.0.1a21}/src/diracx/db/os/__init__.py +0 -0
  19. {diracx_db-0.0.1a19 → diracx_db-0.0.1a21}/src/diracx/db/os/job_parameters.py +0 -0
  20. {diracx_db-0.0.1a19 → diracx_db-0.0.1a21}/src/diracx/db/os/utils.py +0 -0
  21. {diracx_db-0.0.1a19 → diracx_db-0.0.1a21}/src/diracx/db/py.typed +0 -0
  22. {diracx_db-0.0.1a19 → diracx_db-0.0.1a21}/src/diracx/db/sql/__init__.py +0 -0
  23. {diracx_db-0.0.1a19 → diracx_db-0.0.1a21}/src/diracx/db/sql/auth/__init__.py +0 -0
  24. {diracx_db-0.0.1a19 → diracx_db-0.0.1a21}/src/diracx/db/sql/auth/db.py +0 -0
  25. {diracx_db-0.0.1a19 → diracx_db-0.0.1a21}/src/diracx/db/sql/auth/schema.py +0 -0
  26. {diracx_db-0.0.1a19 → diracx_db-0.0.1a21}/src/diracx/db/sql/dummy/__init__.py +0 -0
  27. {diracx_db-0.0.1a19 → diracx_db-0.0.1a21}/src/diracx/db/sql/dummy/db.py +0 -0
  28. {diracx_db-0.0.1a19 → diracx_db-0.0.1a21}/src/diracx/db/sql/dummy/schema.py +0 -0
  29. {diracx_db-0.0.1a19 → diracx_db-0.0.1a21}/src/diracx/db/sql/job/__init__.py +0 -0
  30. {diracx_db-0.0.1a19 → diracx_db-0.0.1a21}/src/diracx/db/sql/job/schema.py +0 -0
  31. {diracx_db-0.0.1a19 → diracx_db-0.0.1a21}/src/diracx/db/sql/job_logging/__init__.py +0 -0
  32. {diracx_db-0.0.1a19 → diracx_db-0.0.1a21}/src/diracx/db/sql/job_logging/schema.py +0 -0
  33. {diracx_db-0.0.1a19 → diracx_db-0.0.1a21}/src/diracx/db/sql/pilot_agents/__init__.py +0 -0
  34. {diracx_db-0.0.1a19 → diracx_db-0.0.1a21}/src/diracx/db/sql/pilot_agents/db.py +0 -0
  35. {diracx_db-0.0.1a19 → diracx_db-0.0.1a21}/src/diracx/db/sql/pilot_agents/schema.py +0 -0
  36. {diracx_db-0.0.1a19 → diracx_db-0.0.1a21}/src/diracx/db/sql/sandbox_metadata/__init__.py +0 -0
  37. {diracx_db-0.0.1a19 → diracx_db-0.0.1a21}/src/diracx/db/sql/sandbox_metadata/db.py +0 -0
  38. {diracx_db-0.0.1a19 → diracx_db-0.0.1a21}/src/diracx/db/sql/sandbox_metadata/schema.py +0 -0
  39. {diracx_db-0.0.1a19 → diracx_db-0.0.1a21}/src/diracx/db/sql/task_queue/__init__.py +0 -0
  40. {diracx_db-0.0.1a19 → diracx_db-0.0.1a21}/src/diracx/db/sql/task_queue/db.py +0 -0
  41. {diracx_db-0.0.1a19 → diracx_db-0.0.1a21}/src/diracx/db/sql/task_queue/schema.py +0 -0
  42. {diracx_db-0.0.1a19 → diracx_db-0.0.1a21}/src/diracx_db.egg-info/dependency_links.txt +0 -0
  43. {diracx_db-0.0.1a19 → diracx_db-0.0.1a21}/src/diracx_db.egg-info/entry_points.txt +0 -0
  44. {diracx_db-0.0.1a19 → diracx_db-0.0.1a21}/src/diracx_db.egg-info/top_level.txt +0 -0
  45. {diracx_db-0.0.1a19 → diracx_db-0.0.1a21}/tests/auth/test_authorization_flow.py +0 -0
  46. {diracx_db-0.0.1a19 → diracx_db-0.0.1a21}/tests/auth/test_device_flow.py +0 -0
  47. {diracx_db-0.0.1a19 → diracx_db-0.0.1a21}/tests/auth/test_refresh_token.py +0 -0
  48. {diracx_db-0.0.1a19 → diracx_db-0.0.1a21}/tests/jobs/test_jobLoggingDB.py +0 -0
  49. {diracx_db-0.0.1a19 → diracx_db-0.0.1a21}/tests/jobs/test_sandbox_metadata.py +0 -0
  50. {diracx_db-0.0.1a19 → diracx_db-0.0.1a21}/tests/opensearch/test_connection.py +0 -0
  51. {diracx_db-0.0.1a19 → diracx_db-0.0.1a21}/tests/opensearch/test_index_template.py +0 -0
  52. {diracx_db-0.0.1a19 → diracx_db-0.0.1a21}/tests/opensearch/test_search.py +0 -0
  53. {diracx_db-0.0.1a19 → diracx_db-0.0.1a21}/tests/pilot_agents/__init__.py +0 -0
  54. {diracx_db-0.0.1a19 → diracx_db-0.0.1a21}/tests/pilot_agents/test_pilotAgentsDB.py +0 -0
  55. {diracx_db-0.0.1a19 → diracx_db-0.0.1a21}/tests/test_dummyDB.py +0 -0
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: diracx-db
-Version: 0.0.1a19
+Version: 0.0.1a21
 Summary: TODO
 License: GPL-3.0-only
 Classifier: Intended Audience :: Science/Research
@@ -14,7 +14,7 @@ Requires-Dist: dirac
 Requires-Dist: diracx-core
 Requires-Dist: fastapi
 Requires-Dist: opensearch-py[async]
-Requires-Dist: pydantic>=2.4
+Requires-Dist: pydantic>=2.10
 Requires-Dist: sqlalchemy[aiomysql,aiosqlite]>=2
 Provides-Extra: testing
 Requires-Dist: diracx-testing; extra == "testing"
@@ -17,7 +17,7 @@ dependencies = [
     "diracx-core",
     "fastapi",
     "opensearch-py[async]",
-    "pydantic >=2.4",
+    "pydantic >=2.10",
     "sqlalchemy[aiomysql,aiosqlite] >= 2",
 ]
 dynamic = ["version"]
@@ -0,0 +1,338 @@
+from __future__ import annotations
+
+from datetime import datetime, timezone
+from typing import TYPE_CHECKING, Any
+
+from sqlalchemy import bindparam, delete, func, insert, select, update
+from sqlalchemy.exc import IntegrityError, NoResultFound
+
+if TYPE_CHECKING:
+    from sqlalchemy.sql.elements import BindParameter
+from diracx.core.exceptions import InvalidQueryError, JobNotFound
+from diracx.core.models import (
+    LimitedJobStatusReturn,
+    SearchSpec,
+    SortSpec,
+)
+
+from ..utils import BaseSQLDB, apply_search_filters, apply_sort_constraints
+from .schema import (
+    InputData,
+    JobCommands,
+    JobDBBase,
+    JobJDLs,
+    Jobs,
+)
+
+
+def _get_columns(table, parameters):
+    columns = [x for x in table.columns]
+    if parameters:
+        if unrecognised_parameters := set(parameters) - set(table.columns.keys()):
+            raise InvalidQueryError(
+                f"Unrecognised parameters requested {unrecognised_parameters}"
+            )
+        columns = [c for c in columns if c.name in parameters]
+    return columns
+
+
+class JobDB(BaseSQLDB):
+    metadata = JobDBBase.metadata
+
+    # TODO: this is copied from the DIRAC JobDB
+    # but is overwritten in LHCbDIRAC, so we need
+    # to find a way to make it dynamic
+    jdl2DBParameters = ["JobName", "JobType", "JobGroup"]
+
+    async def summary(self, group_by, search) -> list[dict[str, str | int]]:
+        columns = _get_columns(Jobs.__table__, group_by)
+
+        stmt = select(*columns, func.count(Jobs.JobID).label("count"))
+        stmt = apply_search_filters(Jobs.__table__.columns.__getitem__, stmt, search)
+        stmt = stmt.group_by(*columns)
+
+        # Execute the query
+        return [
+            dict(row._mapping)
+            async for row in (await self.conn.stream(stmt))
+            if row.count > 0  # type: ignore
+        ]
+
+    async def search(
+        self,
+        parameters: list[str] | None,
+        search: list[SearchSpec],
+        sorts: list[SortSpec],
+        *,
+        distinct: bool = False,
+        per_page: int = 100,
+        page: int | None = None,
+    ) -> tuple[int, list[dict[Any, Any]]]:
+        # Find which columns to select
+        columns = _get_columns(Jobs.__table__, parameters)
+
+        stmt = select(*columns)
+
+        stmt = apply_search_filters(Jobs.__table__.columns.__getitem__, stmt, search)
+        stmt = apply_sort_constraints(Jobs.__table__.columns.__getitem__, stmt, sorts)
+
+        if distinct:
+            stmt = stmt.distinct()
+
+        # Calculate total count before applying pagination
+        total_count_subquery = stmt.alias()
+        total_count_stmt = select(func.count()).select_from(total_count_subquery)
+        total = (await self.conn.execute(total_count_stmt)).scalar_one()
+
+        # Apply pagination
+        if page is not None:
+            if page < 1:
+                raise InvalidQueryError("Page must be a positive integer")
+            if per_page < 1:
+                raise InvalidQueryError("Per page must be a positive integer")
+            stmt = stmt.offset((page - 1) * per_page).limit(per_page)
+
+        # Execute the query
+        return total, [
+            dict(row._mapping) async for row in (await self.conn.stream(stmt))
+        ]
+
+    async def insert_input_data(self, lfns: dict[int, list[str]]):
+        await self.conn.execute(
+            InputData.__table__.insert(),
+            [
+                {
+                    "JobID": job_id,
+                    "LFN": lfn,
+                }
+                for job_id, lfns_ in lfns.items()
+                for lfn in lfns_
+            ],
+        )
+
+    async def setJobAttributes(self, job_id, jobData):
+        """TODO: add myDate and force parameters."""
+        if "Status" in jobData:
+            jobData = jobData | {"LastUpdateTime": datetime.now(tz=timezone.utc)}
+        stmt = update(Jobs).where(Jobs.JobID == job_id).values(jobData)
+        await self.conn.execute(stmt)
+
+    async def create_job(self, original_jdl):
+        """Used to insert a new job with original JDL. Returns inserted job id."""
+        from DIRAC.WorkloadManagementSystem.DB.JobDBUtils import compressJDL
+
+        result = await self.conn.execute(
+            JobJDLs.__table__.insert().values(
+                JDL="",
+                JobRequirements="",
+                OriginalJDL=compressJDL(original_jdl),
+            )
+        )
+        return result.lastrowid
+
+    async def insert_job_attributes(self, jobs_to_update: dict[int, dict]):
+        await self.conn.execute(
+            Jobs.__table__.insert(),
+            [
+                {
+                    "JobID": job_id,
+                    **attrs,
+                }
+                for job_id, attrs in jobs_to_update.items()
+            ],
+        )
+
+    async def update_job_jdls(self, jdls_to_update: dict[int, str]):
+        """Used to update the JDL, typically just after inserting the original JDL, or rescheduling, for example."""
+        from DIRAC.WorkloadManagementSystem.DB.JobDBUtils import compressJDL
+
+        await self.conn.execute(
+            JobJDLs.__table__.update().where(
+                JobJDLs.__table__.c.JobID == bindparam("b_JobID")
+            ),
+            [
+                {
+                    "b_JobID": job_id,
+                    "JDL": compressJDL(jdl),
+                }
+                for job_id, jdl in jdls_to_update.items()
+            ],
+        )
+
+    async def checkAndPrepareJob(
+        self,
+        jobID,
+        class_ad_job,
+        class_ad_req,
+        owner,
+        owner_group,
+        job_attrs,
+        vo,
+    ):
+        """Check Consistency of Submitted JDL and set some defaults
+        Prepare subJDL with Job Requirements.
+        """
+        from DIRAC.Core.Utilities.DErrno import EWMSSUBM, cmpError
+        from DIRAC.Core.Utilities.ReturnValues import returnValueOrRaise
+        from DIRAC.WorkloadManagementSystem.DB.JobDBUtils import (
+            checkAndPrepareJob,
+        )
+
+        retVal = checkAndPrepareJob(
+            jobID,
+            class_ad_job,
+            class_ad_req,
+            owner,
+            owner_group,
+            job_attrs,
+            vo,
+        )
+
+        if not retVal["OK"]:
+            if cmpError(retVal, EWMSSUBM):
+                await self.setJobAttributes(jobID, job_attrs)
+
+            returnValueOrRaise(retVal)
+
+    async def setJobJDL(self, job_id, jdl):
+        from DIRAC.WorkloadManagementSystem.DB.JobDBUtils import compressJDL
+
+        stmt = (
+            update(JobJDLs).where(JobJDLs.JobID == job_id).values(JDL=compressJDL(jdl))
+        )
+        await self.conn.execute(stmt)
+
+    async def setJobJDLsBulk(self, jdls):
+        from DIRAC.WorkloadManagementSystem.DB.JobDBUtils import compressJDL
+
+        await self.conn.execute(
+            JobJDLs.__table__.update().where(
+                JobJDLs.__table__.c.JobID == bindparam("b_JobID")
+            ),
+            [{"b_JobID": jid, "JDL": compressJDL(jdl)} for jid, jdl in jdls.items()],
+        )
+
+    async def setJobAttributesBulk(self, jobData):
+        """TODO: add myDate and force parameters."""
+        for job_id in jobData.keys():
+            if "Status" in jobData[job_id]:
+                jobData[job_id].update(
+                    {"LastUpdateTime": datetime.now(tz=timezone.utc)}
+                )
+
+        await self.conn.execute(
+            Jobs.__table__.update().where(
+                Jobs.__table__.c.JobID == bindparam("b_JobID")
+            ),
+            [{"b_JobID": job_id, **attrs} for job_id, attrs in jobData.items()],
+        )
+
+    async def getJobJDL(self, job_id: int, original: bool = False) -> str:
+        from DIRAC.WorkloadManagementSystem.DB.JobDBUtils import extractJDL
+
+        if original:
+            stmt = select(JobJDLs.OriginalJDL).where(JobJDLs.JobID == job_id)
+        else:
+            stmt = select(JobJDLs.JDL).where(JobJDLs.JobID == job_id)
+
+        jdl = (await self.conn.execute(stmt)).scalar_one()
+        if jdl:
+            jdl = extractJDL(jdl)
+
+        return jdl
+
+    async def getJobJDLs(self, job_ids, original: bool = False) -> dict[int | str, str]:
+        from DIRAC.WorkloadManagementSystem.DB.JobDBUtils import extractJDL
+
+        if original:
+            stmt = select(JobJDLs.JobID, JobJDLs.OriginalJDL).where(
+                JobJDLs.JobID.in_(job_ids)
+            )
+        else:
+            stmt = select(JobJDLs.JobID, JobJDLs.JDL).where(JobJDLs.JobID.in_(job_ids))
+
+        return {
+            jobid: extractJDL(jdl)
+            for jobid, jdl in (await self.conn.execute(stmt))
+            if jdl
+        }
+
+    async def get_job_status(self, job_id: int) -> LimitedJobStatusReturn:
+        try:
+            stmt = select(Jobs.Status, Jobs.MinorStatus, Jobs.ApplicationStatus).where(
+                Jobs.JobID == job_id
+            )
+            return LimitedJobStatusReturn(
+                **dict((await self.conn.execute(stmt)).one()._mapping)
+            )
+        except NoResultFound as e:
+            raise JobNotFound(job_id) from e
+
+    async def set_job_command(self, job_id: int, command: str, arguments: str = ""):
+        """Store a command to be passed to the job together with the next heart beat."""
+        try:
+            stmt = insert(JobCommands).values(
+                JobID=job_id,
+                Command=command,
+                Arguments=arguments,
+                ReceptionTime=datetime.now(tz=timezone.utc),
+            )
+            await self.conn.execute(stmt)
+        except IntegrityError as e:
+            raise JobNotFound(job_id) from e
+
+    async def set_job_command_bulk(self, commands):
+        """Store a command to be passed to the job together with the next heart beat."""
+        await self.conn.execute(
+            insert(JobCommands),
+            [
+                {
+                    "JobID": job_id,
+                    "Command": command,
+                    "Arguments": arguments,
+                    "ReceptionTime": datetime.now(tz=timezone.utc),
+                }
+                for job_id, command, arguments in commands
+            ],
+        )
+        # FIXME handle IntegrityError
+
+    async def delete_jobs(self, job_ids: list[int]):
+        """Delete jobs from the database."""
+        stmt = delete(JobJDLs).where(JobJDLs.JobID.in_(job_ids))
+        await self.conn.execute(stmt)
+
+    async def set_properties(
+        self, properties: dict[int, dict[str, Any]], update_timestamp: bool = False
+    ) -> int:
+        """Update the job parameters
+        All the jobs must update the same properties.
+
+        :param properties: {job_id: {prop1: val1, prop2: val2}}
+        :param update_timestamp: if True, update the LastUpdate to now
+
+        :return rowcount
+
+        """
+        # Check that we always update the same set of properties
+        required_parameters_set = {tuple(sorted(k.keys())) for k in properties.values()}
+
+        if len(required_parameters_set) != 1:
+            raise NotImplementedError(
+                "All the jobs should update the same set of properties"
+            )
+
+        required_parameters = list(required_parameters_set)[0]
+        update_parameters = [{"job_id": k, **v} for k, v in properties.items()]
+
+        columns = _get_columns(Jobs.__table__, required_parameters)
+        values: dict[str, BindParameter[Any] | datetime] = {
+            c.name: bindparam(c.name) for c in columns
+        }
+        if update_timestamp:
+            values["LastUpdateTime"] = datetime.now(tz=timezone.utc)
+
+        stmt = update(Jobs).where(Jobs.JobID == bindparam("job_id")).values(**values)
+        rows = await self.conn.execute(stmt, update_parameters)
+
+        return rows.rowcount
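Note: the bulk methods above (update_job_jdls, setJobJDLsBulk, setJobAttributesBulk, set_properties) all rely on SQLAlchemy's executemany form: one UPDATE compiled with bindparam() placeholders, then executed against a list of parameter dictionaries. The sketch below shows the bare pattern under illustrative assumptions; the Jobs table and status values here are stand-ins, not this package's schema.

    # Minimal sketch of the bindparam()/executemany pattern (illustrative table).
    from sqlalchemy import (
        Column, Integer, MetaData, String, Table, bindparam, create_engine, update,
    )

    metadata = MetaData()
    jobs = Table(
        "Jobs",
        metadata,
        Column("JobID", Integer, primary_key=True),
        Column("Status", String(32)),
    )

    engine = create_engine("sqlite://")
    metadata.create_all(engine)

    with engine.begin() as conn:
        conn.execute(
            jobs.insert(),
            [{"JobID": 1, "Status": "Received"}, {"JobID": 2, "Status": "Received"}],
        )
        # The WHERE bind is named "b_JobID" so it cannot collide with the
        # "JobID" column key; the remaining dict keys become the SET clause.
        stmt = update(jobs).where(jobs.c.JobID == bindparam("b_JobID"))
        conn.execute(
            stmt,
            [{"b_JobID": 1, "Status": "Checking"}, {"b_JobID": 2, "Status": "Failed"}],
        )

The "b_" prefix matters: a bindparam() name that matched a column being SET would be ambiguous, which is why the methods above rename the key before executing.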
@@ -4,11 +4,14 @@ import time
 from datetime import datetime, timezone
 from typing import TYPE_CHECKING

+from pydantic import BaseModel
 from sqlalchemy import delete, func, insert, select

 if TYPE_CHECKING:
     pass

+from collections import defaultdict
+
 from diracx.core.exceptions import JobNotFound
 from diracx.core.models import (
     JobStatus,
@@ -24,6 +27,15 @@ from .schema import (
 MAGIC_EPOC_NUMBER = 1270000000


+class JobLoggingRecord(BaseModel):
+    job_id: int
+    status: JobStatus
+    minor_status: str
+    application_status: str
+    date: datetime
+    source: str
+
+
 class JobLoggingDB(BaseSQLDB):
     """Frontend for the JobLoggingDB. Provides the ability to store changes with timestamps."""

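JobLoggingRecord is a plain pydantic model, so each record is validated when it is constructed. A quick sketch, assuming this module's imports; the field values are illustrative only:

    # Illustrative only: pydantic validates the fields on construction.
    record = JobLoggingRecord(
        job_id=123,
        status=JobStatus.RUNNING,
        minor_status="Application",
        application_status="Executing",
        date=datetime(2024, 1, 1, tzinfo=timezone.utc),
        source="JobWrapper",
    )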
@@ -69,6 +81,49 @@ class JobLoggingDB(BaseSQLDB):
         )
         await self.conn.execute(stmt)

+    async def bulk_insert_record(
+        self,
+        records: list[JobLoggingRecord],
+    ):
+        """Bulk insert entries to the JobLoggingDB table."""
+
+        def get_epoc(date):
+            return (
+                time.mktime(date.timetuple())
+                + date.microsecond / 1000000.0
+                - MAGIC_EPOC_NUMBER
+            )
+
+        # First, fetch the maximum SeqNums for the given job_ids
+        seqnum_stmt = (
+            select(
+                LoggingInfo.JobID, func.coalesce(func.max(LoggingInfo.SeqNum) + 1, 1)
+            )
+            .where(LoggingInfo.JobID.in_([record.job_id for record in records]))
+            .group_by(LoggingInfo.JobID)
+        )
+
+        seqnum = {jid: seqnum for jid, seqnum in (await self.conn.execute(seqnum_stmt))}
+        # If a seqnum is not found, assume it does not exist and the first sequence number is 1.
+
+        # https://docs.sqlalchemy.org/en/20/orm/queryguide/dml.html#orm-bulk-insert-statements
+        await self.conn.execute(
+            insert(LoggingInfo),
+            [
+                {
+                    "JobID": record.job_id,
+                    "SeqNum": seqnum.get(record.job_id, 1),
+                    "Status": record.status,
+                    "MinorStatus": record.minor_status,
+                    "ApplicationStatus": record.application_status[:255],
+                    "StatusTime": record.date,
+                    "StatusTimeOrder": get_epoc(record.date),
+                    "Source": record.source[:32],
+                }
+                for record in records
+            ],
+        )
+
     async def get_records(self, job_id: int) -> list[JobStatusReturn]:
         """Returns a Status,MinorStatus,ApplicationStatus,StatusTime,Source tuple
         for each record found for job specified by its jobID in historical order.
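The SeqNum query in bulk_insert_record assigns each job its next sequence number in a single round trip; jobs with no existing rows are simply absent from the result, which is why the insert falls back to seqnum.get(record.job_id, 1). A standalone sketch of the same idea, under illustrative assumptions (in-memory table and engine, not this package's schema):

    # Sketch of the per-job "next SeqNum" query (illustrative table/engine).
    import asyncio

    from sqlalchemy import Column, Integer, MetaData, Table, func, select
    from sqlalchemy.ext.asyncio import create_async_engine

    metadata = MetaData()
    logging_info = Table(
        "LoggingInfo",
        metadata,
        Column("JobID", Integer, primary_key=True),
        Column("SeqNum", Integer, primary_key=True),
    )

    async def main() -> None:
        engine = create_async_engine("sqlite+aiosqlite://")
        async with engine.begin() as conn:
            await conn.run_sync(metadata.create_all)
            await conn.execute(
                logging_info.insert(),
                [{"JobID": 1, "SeqNum": 1}, {"JobID": 1, "SeqNum": 2}],
            )
            stmt = (
                select(
                    logging_info.c.JobID,
                    func.coalesce(func.max(logging_info.c.SeqNum) + 1, 1),
                )
                .where(logging_info.c.JobID.in_([1, 2]))
                .group_by(logging_info.c.JobID)
            )
            seqnum = {jid: nxt for jid, nxt in await conn.execute(stmt)}
            print(seqnum)  # {1: 3}; job 2 has no rows, so .get(2, 1) would yield 1

    asyncio.run(main())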
@@ -159,3 +214,22 @@ class JobLoggingDB(BaseSQLDB):
             result[event] = str(etime + MAGIC_EPOC_NUMBER)

         return result
+
+    async def get_wms_time_stamps_bulk(self, job_ids):
+        """Get TimeStamps for job MajorState transitions for multiple jobs at once
+        return a {JobID: {State: timestamp}} dictionary.
+        """
+        result = defaultdict(dict)
+        stmt = select(
+            LoggingInfo.JobID,
+            LoggingInfo.Status,
+            LoggingInfo.StatusTimeOrder,
+        ).where(LoggingInfo.JobID.in_(job_ids))
+        rows = await self.conn.execute(stmt)
+        if not rows.rowcount:
+            return {}
+
+        for job_id, event, etime in rows:
+            result[job_id][event] = str(etime + MAGIC_EPOC_NUMBER)
+
+        return result
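StatusTimeOrder is stored as seconds relative to MAGIC_EPOC_NUMBER to keep the float compact, and both the single-job and bulk readers add the offset back before stringifying. A small sketch of that round trip, mirroring the arithmetic above (the sample date is illustrative):

    # Mirrors get_epoc() in bulk_insert_record and the readback above.
    import time
    from datetime import datetime

    MAGIC_EPOC_NUMBER = 1270000000

    def get_epoc(date: datetime) -> float:
        # Seconds since the "magic" epoch (local-time interpretation, as in the code).
        return time.mktime(date.timetuple()) + date.microsecond / 1000000.0 - MAGIC_EPOC_NUMBER

    stored = get_epoc(datetime(2024, 1, 1))
    restored = str(stored + MAGIC_EPOC_NUMBER)  # the string timestamp returned to callers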
@@ -16,7 +16,7 @@ from typing import TYPE_CHECKING, Self, cast
 import sqlalchemy.types as types
 from pydantic import TypeAdapter
 from sqlalchemy import Column as RawColumn
-from sqlalchemy import DateTime, Enum, MetaData, select
+from sqlalchemy import DateTime, Enum, MetaData, func, select
 from sqlalchemy.exc import OperationalError
 from sqlalchemy.ext.asyncio import AsyncConnection, AsyncEngine, create_async_engine
 from sqlalchemy.ext.compiler import compiles
@@ -100,7 +100,9 @@ def mysql_date_trunc(element, compiler, **kw):
         "MONTH": "%Y-%m",
         "YEAR": "%Y",
     }[element._time_resolution]
-    return f"DATE_FORMAT({compiler.process(element.clauses)}, '{pattern}')"
+
+    (dt_col,) = list(element.clauses)
+    return compiler.process(func.date_format(dt_col, pattern))


 @compiles(date_trunc, "sqlite")
@@ -113,7 +115,13 @@ def sqlite_date_trunc(element, compiler, **kw):
         "MONTH": "%Y-%m",
         "YEAR": "%Y",
     }[element._time_resolution]
-    return f"strftime('{pattern}', {compiler.process(element.clauses)})"
+    (dt_col,) = list(element.clauses)
+    return compiler.process(
+        func.strftime(
+            pattern,
+            dt_col,
+        )
+    )


 def substract_date(**kwargs: float) -> datetime:
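Both compiler hooks now build the truncation expression through func.date_format() / func.strftime() instead of interpolating an f-string, so the format pattern is rendered by SQLAlchemy's compiler rather than spliced into raw SQL by hand. A hedged sketch of what the SQLite branch produces, using a stand-in column name:

    # Sketch: compiling func.strftime() on the SQLite dialect (stand-in column).
    from sqlalchemy import column, func
    from sqlalchemy.dialects import sqlite

    expr = func.strftime("%Y-%m", column("SubmissionTime"))
    print(
        expr.compile(
            dialect=sqlite.dialect(),
            compile_kwargs={"literal_binds": True},
        )
    )
    # -> strftime('%Y-%m', "SubmissionTime")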