diracx-db 0.0.1a6__py3-none-any.whl → 0.0.1a8__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
diracx/db/sql/jobs/db.py CHANGED
@@ -3,11 +3,14 @@ from __future__ import annotations
 import logging
 import time
 from datetime import datetime, timezone
-from typing import Any
+from typing import TYPE_CHECKING, Any
 
-from sqlalchemy import delete, func, insert, select, update
+from sqlalchemy import bindparam, delete, func, insert, select, update
 from sqlalchemy.exc import IntegrityError, NoResultFound
 
+if TYPE_CHECKING:
+    from sqlalchemy.sql.elements import BindParameter
+
 from diracx.core.exceptions import InvalidQueryError, JobNotFound
 from diracx.core.models import (
     JobMinorStatus,
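
The new BindParameter import is guarded by `if TYPE_CHECKING:` because it is only needed to annotate the `values` dictionary in the `set_properties` method added below; the class is never used at runtime. A minimal sketch of this guarded-import pattern (the `describe` helper is illustrative, not part of diracx):

    from __future__ import annotations

    from typing import TYPE_CHECKING

    if TYPE_CHECKING:
        # Only evaluated by static type checkers, never imported at runtime.
        from sqlalchemy.sql.elements import BindParameter


    def describe(param: BindParameter) -> str:
        # With `from __future__ import annotations` the annotation above stays
        # a string at runtime, so the guarded import is enough for type
        # checking and costs nothing when the module is imported.
        return f"bind parameter {param.key!r}"
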
@@ -474,6 +477,44 @@ class JobDB(BaseSQLDB):
         stmt = delete(JobJDLs).where(JobJDLs.JobID.in_(job_ids))
         await self.conn.execute(stmt)
 
+    async def set_properties(
+        self, properties: dict[int, dict[str, Any]], update_timestamp: bool = False
+    ) -> int:
+        """Update the job parameters
+        All the jobs must update the same properties
+
+        :param properties: {job_id : {prop1: val1, prop2:val2}
+        :param update_timestamp: if True, update the LastUpdate to now
+
+        :return rowcount
+
+        """
+
+        # Check that all we always update the same set of properties
+        required_parameters_set = set(
+            [tuple(sorted(k.keys())) for k in properties.values()]
+        )
+
+        if len(required_parameters_set) != 1:
+            raise NotImplementedError(
+                "All the jobs should update the same set of properties"
+            )
+
+        required_parameters = list(required_parameters_set)[0]
+        update_parameters = [{"job_id": k, **v} for k, v in properties.items()]
+
+        columns = _get_columns(Jobs.__table__, required_parameters)
+        values: dict[str, BindParameter[Any] | datetime] = {
+            c.name: bindparam(c.name) for c in columns
+        }
+        if update_timestamp:
+            values["LastUpdateTime"] = datetime.now(tz=timezone.utc)
+
+        stmt = update(Jobs).where(Jobs.JobID == bindparam("job_id")).values(**values)
+        rows = await self.conn.execute(stmt, update_parameters)
+
+        return rows.rowcount
+
 
 MAGIC_EPOC_NUMBER = 1270000000
 
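
The new `JobDB.set_properties` builds one UPDATE whose SET and WHERE clauses use `bindparam()` placeholders and executes it once with a list of per-job parameter dictionaries, so SQLAlchemy runs it in executemany mode. Below is a self-contained sketch of the same technique against a throwaway table (the table, the aiosqlite URL, and the `b_`-prefixed bind names are assumptions for this example, not the diracx schema; the prefix keeps bind names distinct from column names, the conservative form shown in the SQLAlchemy documentation):

    import asyncio
    from datetime import datetime, timezone

    from sqlalchemy import (
        Column, DateTime, Integer, MetaData, String, Table, bindparam, insert, update,
    )
    from sqlalchemy.ext.asyncio import create_async_engine

    metadata = MetaData()
    jobs = Table(
        "Jobs",
        metadata,
        Column("JobID", Integer, primary_key=True),
        Column("Status", String(32)),
        Column("LastUpdateTime", DateTime),
    )


    async def set_properties(
        conn, properties: dict[int, dict], update_timestamp: bool = False
    ) -> int:
        """Update the same set of columns for many jobs with one executemany UPDATE."""
        # All jobs must update the same keys, otherwise one statement cannot serve them all.
        keysets = {tuple(sorted(props)) for props in properties.values()}
        if len(keysets) != 1:
            raise NotImplementedError("all jobs must update the same properties")

        # SET clause: each column is bound to a named placeholder.
        values = {name: bindparam(f"b_{name}") for name in next(iter(keysets))}
        if update_timestamp:
            values["LastUpdateTime"] = datetime.now(tz=timezone.utc)

        stmt = update(jobs).where(jobs.c.JobID == bindparam("b_job_id")).values(**values)
        params = [
            {"b_job_id": job_id, **{f"b_{k}": v for k, v in props.items()}}
            for job_id, props in properties.items()
        ]
        result = await conn.execute(stmt, params)  # one executemany round trip
        return result.rowcount


    async def main() -> None:
        engine = create_async_engine("sqlite+aiosqlite:///:memory:")
        async with engine.begin() as conn:
            await conn.run_sync(metadata.create_all)
            await conn.execute(
                insert(jobs),
                [{"JobID": 1, "Status": "Received"}, {"JobID": 2, "Status": "Received"}],
            )
            changed = await set_properties(
                conn, {1: {"Status": "Running"}, 2: {"Status": "Running"}}, update_timestamp=True
            )
            print(changed)  # 2 (rowcount semantics for executemany are driver-dependent)


    asyncio.run(main())

Compared with issuing one UPDATE per job, this keeps a bulk status change to a single prepared statement and a single round trip per batch.
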
@@ -521,12 +562,12 @@ class JobLoggingDB(BaseSQLDB):
             ApplicationStatus=application_status[:255],
             StatusTime=date,
             StatusTimeOrder=epoc,
-            StatusSource=source[:32],
+            Source=source[:32],
         )
         await self.conn.execute(stmt)
 
     async def get_records(self, job_id: int) -> list[JobStatusReturn]:
-        """Returns a Status,MinorStatus,ApplicationStatus,StatusTime,StatusSource tuple
+        """Returns a Status,MinorStatus,ApplicationStatus,StatusTime,Source tuple
         for each record found for job specified by its jobID in historical order
         """
 
@@ -536,7 +577,7 @@ class JobLoggingDB(BaseSQLDB):
                 LoggingInfo.MinorStatus,
                 LoggingInfo.ApplicationStatus,
                 LoggingInfo.StatusTime,
-                LoggingInfo.StatusSource,
+                LoggingInfo.Source,
             )
             .where(LoggingInfo.JobID == int(job_id))
             .order_by(LoggingInfo.StatusTimeOrder, LoggingInfo.StatusTime)
@@ -588,7 +629,7 @@ class JobLoggingDB(BaseSQLDB):
                 MinorStatus=minor_status,
                 ApplicationStatus=application_status,
                 StatusTime=status_time,
-                StatusSource=status_source,
+                Source=status_source,
             )
         )
 
diracx/db/sql/jobs/schema.py CHANGED
@@ -204,7 +204,7 @@ class LoggingInfo(JobLoggingDBBase):
     StatusTime = DateNowColumn()
     # TODO: Check that this corresponds to the DOUBLE(12,3) type in MySQL
     StatusTimeOrder = Column(Numeric(precision=12, scale=3), default=0)
-    StatusSource = Column(String(32), default="Unknown")
+    Source = Column(String(32), default="Unknown", name="StatusSource")
     __table_args__ = (PrimaryKeyConstraint("JobID", "SeqNum"),)
 
 
diracx/db/sql/jobs/status_utility.py CHANGED
@@ -41,7 +41,7 @@ async def set_job_status(
     # transform JobStateUpdate objects into dicts
     statusDict = {}
     for key, value in status.items():
-        statusDict[key] = value.dict(by_alias=True)
+        statusDict[key] = {k: v for k, v in value.dict().items() if v is not None}
 
     res = await job_db.search(
         parameters=["Status", "StartExecTime", "EndExecTime"],
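
Instead of serializing the `JobStatusUpdate` model with `by_alias=True`, the handler now drops fields left as `None`, so the `sDict.get(...)` fallbacks later in this function can supply defaults. A small sketch of the same filtering with a stand-in pydantic v1 style model (the model definition here is illustrative, not the diracx one):

    from typing import Optional

    from pydantic import BaseModel


    class JobStatusUpdate(BaseModel):
        Status: Optional[str] = None
        MinorStatus: Optional[str] = None
        ApplicationStatus: Optional[str] = None
        Source: Optional[str] = None


    update = JobStatusUpdate(Status="Running", Source="JobWrapper")

    # Keep only the fields that were actually provided.
    status_dict = {k: v for k, v in update.dict().items() if v is not None}
    print(status_dict)  # {'Status': 'Running', 'Source': 'JobWrapper'}

    # Missing keys then fall back to a default, mirroring the "idem" handling below.
    print(status_dict.get("MinorStatus", "idem"))  # idem
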
@@ -110,7 +110,7 @@ async def set_job_status(
     # return result
 
     for updTime in updateTimes:
-        if statusDict[updTime]["StatusSource"].startswith("Job"):
+        if statusDict[updTime]["Source"].startswith("Job"):
             job_data["HeartBeatTime"] = updTime
 
     if not startTime and newStartTime:
@@ -122,22 +122,16 @@ async def set_job_status(
     if job_data:
         await job_db.setJobAttributes(job_id, job_data)
 
-    # Update the JobLoggingDB records
-    # TODO: Because I really didn't liked the fact that the input field is called "Source"
-    # and the output field is called "StatusSource"
-    # I changed the name of the input field to "StatusSource"
-    # Meaning this change must be added to the transformation layer for DIRAC.
-
     for updTime in updateTimes:
         sDict = statusDict[updTime]
-        if not sDict["Status"]:
+        if not sDict.get("Status"):
             sDict["Status"] = "idem"
-        if not sDict["MinorStatus"]:
+        if not sDict.get("MinorStatus"):
             sDict["MinorStatus"] = "idem"
-        if not sDict["ApplicationStatus"]:
+        if not sDict.get("ApplicationStatus"):
             sDict["ApplicationStatus"] = "idem"
-        if not sDict["StatusSource"]:
-            sDict["StatusSource"] = "Unknown"
+        if not sDict.get("Source"):
+            sDict["Source"] = "Unknown"
 
         await job_logging_db.insert_record(
             job_id,
@@ -145,7 +139,7 @@ async def set_job_status(
             sDict["MinorStatus"],
             sDict["ApplicationStatus"],
             updTime,
-            sDict["StatusSource"],
+            sDict["Source"],
         )
 
     return SetJobStatusReturn(**job_data)
@@ -186,7 +180,7 @@ async def delete_jobs(
             datetime.now(timezone.utc): JobStatusUpdate(
                 Status=JobStatus.DELETED,
                 MinorStatus="Checking accounting",
-                StatusSource="job_manager",
+                Source="job_manager",
             )
         },
         job_db,
@@ -222,7 +216,7 @@ async def kill_jobs(
            datetime.now(timezone.utc): JobStatusUpdate(
                Status=JobStatus.KILLED,
                MinorStatus="Marked for termination",
-               StatusSource="job_manager",
+               Source="job_manager",
            )
        },
        job_db,
@@ -242,7 +236,7 @@ async def kill_jobs(
     #         datetime.now(timezone.utc): JobStatusUpdate(
     #             Status=JobStatus.KILLED,
     #             MinorStatus="Marked for termination",
-    #             StatusSource="job_manager",
+    #             Source="job_manager",
     #         )
     #     },
     #     job_db,
diracx/db/sql/utils.py CHANGED
@@ -173,7 +173,7 @@ class BaseSQLDB(metaclass=ABCMeta):
             raise RuntimeError(f"{self.__class__} was used before entering")
         return cast(AsyncConnection, self._conn.get())
 
-    async def __aenter__(self):
+    async def __aenter__(self) -> Self:
         """
         Create a connection.
         This is called by the Dependency mechanism (see ``db_transaction``),
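
Annotating `__aenter__` as returning `Self` tells type checkers that `async with SomeConcreteDB() as db:` yields the concrete subclass, so subclass-specific methods are visible without casts. A minimal sketch of the pattern (the class bodies are placeholders, not the diracx implementation; `Self` needs Python 3.11+ or `typing_extensions`):

    from __future__ import annotations

    import asyncio
    from typing import Self  # Python 3.11+; otherwise: from typing_extensions import Self


    class BaseSQLDB:
        async def __aenter__(self) -> Self:
            # A real implementation would acquire a connection / open a transaction here.
            return self

        async def __aexit__(self, exc_type, exc, tb) -> None:
            # ... and commit or roll back, then release the connection here.
            return None


    class JobDB(BaseSQLDB):
        async def count_jobs(self) -> int:
            return 0


    async def main() -> None:
        async with JobDB() as db:
            # Because __aenter__ returns Self, type checkers infer `db: JobDB`,
            # not `db: BaseSQLDB`, so `count_jobs` resolves without a cast.
            print(await db.count_jobs())


    asyncio.run(main())
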
diracx_db-0.0.1a6.dist-info/METADATA → diracx_db-0.0.1a8.dist-info/METADATA RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: diracx-db
-Version: 0.0.1a6
+Version: 0.0.1a8
 Summary: TODO
 License: GPL-3.0-only
 Classifier: Intended Audience :: Science/Research
diracx_db-0.0.1a6.dist-info/RECORD → diracx_db-0.0.1a8.dist-info/RECORD RENAMED
@@ -6,7 +6,7 @@ diracx/db/os/__init__.py,sha256=IZr6z6SefrRvuC8sTC4RmB3_wwOyEt1GzpDuwSMH8O4,112
 diracx/db/os/job_parameters.py,sha256=Knca19uT2G-5FI7MOFlaOAXeHn4ecPVLIH30TiwhaTw,858
 diracx/db/os/utils.py,sha256=MACBZSn9mzbaY_HAPqk-OBmLI2OYCph_1aI7oqlr-K0,9124
 diracx/db/sql/__init__.py,sha256=R6tk5lo1EHbt8joGDesesYHcc1swIq9T4AaSixhh7lA,252
-diracx/db/sql/utils.py,sha256=DAqqPCQIloWyK_Tpv7CXP3hrO1nq1Bw3CC7Nwlg5Ji4,7849
+diracx/db/sql/utils.py,sha256=BuXjIuXN-_v8YkCoMoMhw2tHVUqG6lTBx-e4VEYWE8o,7857
 diracx/db/sql/auth/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 diracx/db/sql/auth/db.py,sha256=7GAdwD7g2YR86yQkP9ecuIbFd1h_wxVKnF_GfXfZqLA,9915
 diracx/db/sql/auth/schema.py,sha256=wutCjZ_uz21J0HHZjwoOXq3cLdlNY2lCR390yIJ_T60,2891
@@ -14,14 +14,14 @@ diracx/db/sql/dummy/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSu
 diracx/db/sql/dummy/db.py,sha256=5PIPv6aKY7CGIwmvnGKowjVr9ZQWpbjFSd2PIX7YOUw,1627
 diracx/db/sql/dummy/schema.py,sha256=uEkGDNVZbmJecytkHY1CO-M1MiKxe5w1_h0joJMPC9E,680
 diracx/db/sql/jobs/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-diracx/db/sql/jobs/db.py,sha256=cprktaw2XuvjEsSv6GD1vPWmJTGjmb7RAIaXigufM8w,28337
-diracx/db/sql/jobs/schema.py,sha256=6K8zn7kqvfPsCTfuCntSblZU-wrAwzziLW_b26s5KDE,10128
-diracx/db/sql/jobs/status_utility.py,sha256=VLFTq6npFF6ACeKofxg70zOgZMOmjD7ynYGYKVAYQAc,10843
+diracx/db/sql/jobs/db.py,sha256=-hzxAoL6Pukpaf3lpMTrzR4s0Md3jENHVxRrmUBKS60,29781
+diracx/db/sql/jobs/schema.py,sha256=5KCMciWOymJqinIwd-gdHZXrMH8p9cPz7fZo0lYHDXk,10143
+diracx/db/sql/jobs/status_utility.py,sha256=0kAt623nh1O5wgsgktctdCmHEynO1nU0vn-7zakNeOA,10525
 diracx/db/sql/sandbox_metadata/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 diracx/db/sql/sandbox_metadata/db.py,sha256=HcmXlnObD10Lwe_VMyloTRa6TFZgEKOJn7ud8AkisbE,3464
 diracx/db/sql/sandbox_metadata/schema.py,sha256=VekS-tJqXFqn6DE3bMgZqKyBVkgFCm0mfPx7s9rZ6Bg,1372
-diracx_db-0.0.1a6.dist-info/METADATA,sha256=eFneeg6C3nt-R4ahzp7Tss_SP_YVKv3_VL8R5nYiMb0,680
-diracx_db-0.0.1a6.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92
-diracx_db-0.0.1a6.dist-info/entry_points.txt,sha256=xEFGu_zgmPgQPlUeFtdahQfQIboJ1ugFOK8eMio9gtw,271
-diracx_db-0.0.1a6.dist-info/top_level.txt,sha256=vJx10tdRlBX3rF2Psgk5jlwVGZNcL3m_7iQWwgPXt-U,7
-diracx_db-0.0.1a6.dist-info/RECORD,,
+diracx_db-0.0.1a8.dist-info/METADATA,sha256=XUbk4aS7IZCL8ie9Rcpg57GSmiWjOolNF07ExUEd-DI,680
+diracx_db-0.0.1a8.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92
+diracx_db-0.0.1a8.dist-info/entry_points.txt,sha256=xEFGu_zgmPgQPlUeFtdahQfQIboJ1ugFOK8eMio9gtw,271
+diracx_db-0.0.1a8.dist-info/top_level.txt,sha256=vJx10tdRlBX3rF2Psgk5jlwVGZNcL3m_7iQWwgPXt-U,7
+diracx_db-0.0.1a8.dist-info/RECORD,,