diracx-db 0.0.1a17__py3-none-any.whl → 0.0.1a19__py3-none-any.whl

@@ -0,0 +1,109 @@
+from sqlalchemy import (
+    BigInteger,
+    Boolean,
+    Float,
+    ForeignKey,
+    Index,
+    Integer,
+    String,
+)
+from sqlalchemy.orm import declarative_base
+
+from ..utils import Column
+
+TaskQueueDBBase = declarative_base()
+
+
+class TaskQueues(TaskQueueDBBase):
+    __tablename__ = "tq_TaskQueues"
+    TQId = Column(Integer, primary_key=True)
+    Owner = Column(String(255), nullable=False)
+    OwnerGroup = Column(String(32), nullable=False)
+    VO = Column(String(32), nullable=False)
+    CPUTime = Column(BigInteger, nullable=False)
+    Priority = Column(Float, nullable=False)
+    Enabled = Column(Boolean, nullable=False, default=0)
+    __table_args__ = (Index("TQOwner", "Owner", "OwnerGroup", "CPUTime"),)
+
+
+class JobsQueue(TaskQueueDBBase):
+    __tablename__ = "tq_Jobs"
+    TQId = Column(
+        Integer, ForeignKey("tq_TaskQueues.TQId", ondelete="CASCADE"), primary_key=True
+    )
+    JobId = Column(Integer, primary_key=True)
+    Priority = Column(Integer, nullable=False)
+    RealPriority = Column(Float, nullable=False)
+    __table_args__ = (Index("TaskIndex", "TQId"),)
+
+
+class SitesQueue(TaskQueueDBBase):
+    __tablename__ = "tq_TQToSites"
+    TQId = Column(
+        Integer, ForeignKey("tq_TaskQueues.TQId", ondelete="CASCADE"), primary_key=True
+    )
+    Value = Column(String(64), primary_key=True)
+    __table_args__ = (
+        Index("SitesTaskIndex", "TQId"),
+        Index("SitesIndex", "Value"),
+    )
+
+
+class GridCEsQueue(TaskQueueDBBase):
+    __tablename__ = "tq_TQToGridCEs"
+    TQId = Column(
+        Integer, ForeignKey("tq_TaskQueues.TQId", ondelete="CASCADE"), primary_key=True
+    )
+    Value = Column(String(64), primary_key=True)
+    __table_args__ = (
+        Index("GridCEsTaskIndex", "TQId"),
+        Index("GridCEsValueIndex", "Value"),
+    )
+
+
+class BannedSitesQueue(TaskQueueDBBase):
+    __tablename__ = "tq_TQToBannedSites"
+    TQId = Column(
+        Integer, ForeignKey("tq_TaskQueues.TQId", ondelete="CASCADE"), primary_key=True
+    )
+    Value = Column(String(64), primary_key=True)
+    __table_args__ = (
+        Index("BannedSitesTaskIndex", "TQId"),
+        Index("BannedSitesValueIndex", "Value"),
+    )
+
+
+class PlatformsQueue(TaskQueueDBBase):
+    __tablename__ = "tq_TQToPlatforms"
+    TQId = Column(
+        Integer, ForeignKey("tq_TaskQueues.TQId", ondelete="CASCADE"), primary_key=True
+    )
+    Value = Column(String(64), primary_key=True)
+    __table_args__ = (
+        Index("PlatformsTaskIndex", "TQId"),
+        Index("PlatformsValueIndex", "Value"),
+    )
+
+
+class JobTypesQueue(TaskQueueDBBase):
+    __tablename__ = "tq_TQToJobTypes"
+    TQId = Column(
+        Integer, ForeignKey("tq_TaskQueues.TQId", ondelete="CASCADE"), primary_key=True
+    )
+    Value = Column(String(64), primary_key=True)
+    __table_args__ = (
+        Index("JobTypesTaskIndex", "TQId"),
+        Index("JobTypesValueIndex", "Value"),
+    )
+
+
+class TagsQueue(TaskQueueDBBase):
+    __tablename__ = "tq_TQToTags"
+    TQId = Column(
+        Integer, ForeignKey("tq_TaskQueues.TQId", ondelete="CASCADE"), primary_key=True
+    )
+    Value = Column(String(64), primary_key=True)
+    __table_args__ = (
+        Index("TagsTaskIndex", "TQId"),
+        Index("TagsValueIndex", "Value"),
+    )
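For reference, the declarative models above can be materialised straight from their shared metadata. A minimal sketch (assuming diracx-db and SQLAlchemy are installed; the table values are illustrative, not part of the package):

```python
# Sketch: create the tq_* tables in an in-memory SQLite database and
# round-trip one row.
from sqlalchemy import create_engine, insert, select

from diracx.db.sql.task_queue.schema import TaskQueueDBBase, TaskQueues

engine = create_engine("sqlite:///:memory:")
TaskQueueDBBase.metadata.create_all(engine)  # CREATE TABLE for every model above

with engine.begin() as conn:
    conn.execute(
        insert(TaskQueues).values(
            TQId=1, Owner="alice", OwnerGroup="user", VO="vo", CPUTime=3600, Priority=1.0
        )
    )
    print(conn.execute(select(TaskQueues.Owner)).scalar_one())  # alice
```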
@@ -0,0 +1,445 @@
+from __future__ import annotations
+
+__all__ = ("utcnow", "Column", "NullColumn", "DateNowColumn", "BaseSQLDB")
+
+import contextlib
+import logging
+import os
+import re
+from abc import ABCMeta
+from collections.abc import AsyncIterator
+from contextvars import ContextVar
+from datetime import datetime, timedelta, timezone
+from functools import partial
+from typing import TYPE_CHECKING, Self, cast
+
+import sqlalchemy.types as types
+from pydantic import TypeAdapter
+from sqlalchemy import Column as RawColumn
+from sqlalchemy import DateTime, Enum, MetaData, select
+from sqlalchemy.exc import OperationalError
+from sqlalchemy.ext.asyncio import AsyncConnection, AsyncEngine, create_async_engine
+from sqlalchemy.ext.compiler import compiles
+from sqlalchemy.sql import expression
+
+from diracx.core.exceptions import InvalidQueryError
+from diracx.core.extensions import select_from_extension
+from diracx.core.models import SortDirection
+from diracx.core.settings import SqlalchemyDsn
+from diracx.db.exceptions import DBUnavailable
+
+if TYPE_CHECKING:
+    from sqlalchemy.types import TypeEngine
+
+logger = logging.getLogger(__name__)
+
+
+class utcnow(expression.FunctionElement):
+    type: TypeEngine = DateTime()
+    inherit_cache: bool = True
+
+
+@compiles(utcnow, "postgresql")
+def pg_utcnow(element, compiler, **kw) -> str:
+    return "TIMEZONE('utc', CURRENT_TIMESTAMP)"
+
+
+@compiles(utcnow, "mssql")
+def ms_utcnow(element, compiler, **kw) -> str:
+    return "GETUTCDATE()"
+
+
+@compiles(utcnow, "mysql")
+def mysql_utcnow(element, compiler, **kw) -> str:
+    return "(UTC_TIMESTAMP)"
+
+
+@compiles(utcnow, "sqlite")
+def sqlite_utcnow(element, compiler, **kw) -> str:
+    return "DATETIME('now')"
+
+
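The cross-dialect behaviour of `utcnow` can be checked by compiling the expression against each dialect; a small sketch:

```python
# Sketch: the utcnow() construct renders to the dialect-specific SQL
# registered by the @compiles hooks above.
from sqlalchemy.dialects import mysql, postgresql, sqlite

for dialect in (postgresql.dialect(), mysql.dialect(), sqlite.dialect()):
    print(f"{dialect.name}: {utcnow().compile(dialect=dialect)}")
# postgresql: TIMEZONE('utc', CURRENT_TIMESTAMP)
# mysql: (UTC_TIMESTAMP)
# sqlite: DATETIME('now')
```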
+class date_trunc(expression.FunctionElement):
+    """Sqlalchemy function to truncate a date to a given resolution.
+
+    Primarily used to be able to query for a specific resolution of a date e.g.
+
+        select * from table where date_trunc('day', date_column) = '2021-01-01'
+        select * from table where date_trunc('year', date_column) = '2021'
+        select * from table where date_trunc('minute', date_column) = '2021-01-01 12:00'
+    """
+
+    type = DateTime()
+    inherit_cache = True
+
+    def __init__(self, *args, time_resolution, **kwargs) -> None:
+        super().__init__(*args, **kwargs)
+        self._time_resolution = time_resolution
+
+
+@compiles(date_trunc, "postgresql")
+def pg_date_trunc(element, compiler, **kw):
+    res = {
+        "SECOND": "second",
+        "MINUTE": "minute",
+        "HOUR": "hour",
+        "DAY": "day",
+        "MONTH": "month",
+        "YEAR": "year",
+    }[element._time_resolution]
+    return f"date_trunc('{res}', {compiler.process(element.clauses)})"
+
+
+@compiles(date_trunc, "mysql")
+def mysql_date_trunc(element, compiler, **kw):
+    pattern = {
+        "SECOND": "%Y-%m-%d %H:%i:%S",
+        "MINUTE": "%Y-%m-%d %H:%i",
+        "HOUR": "%Y-%m-%d %H",
+        "DAY": "%Y-%m-%d",
+        "MONTH": "%Y-%m",
+        "YEAR": "%Y",
+    }[element._time_resolution]
+    return f"DATE_FORMAT({compiler.process(element.clauses)}, '{pattern}')"
+
+
+@compiles(date_trunc, "sqlite")
+def sqlite_date_trunc(element, compiler, **kw):
+    pattern = {
+        "SECOND": "%Y-%m-%d %H:%M:%S",
+        "MINUTE": "%Y-%m-%d %H:%M",
+        "HOUR": "%Y-%m-%d %H",
+        "DAY": "%Y-%m-%d",
+        "MONTH": "%Y-%m",
+        "YEAR": "%Y",
+    }[element._time_resolution]
+    return f"strftime('{pattern}', {compiler.process(element.clauses)})"
+
+
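A sketch of `date_trunc` in a WHERE clause, compiled for SQLite (the `demo` table is illustrative, not part of diracx):

```python
# Sketch: truncate a DateTime column to day resolution in a query.
from sqlalchemy import Column as SAColumn
from sqlalchemy import DateTime, Integer, MetaData, Table, select
from sqlalchemy.dialects import sqlite

demo = Table("demo", MetaData(), SAColumn("id", Integer), SAColumn("ts", DateTime))
stmt = select(demo.c.id).where(
    date_trunc(demo.c.ts, time_resolution="DAY") == "2021-01-01"
)
print(stmt.compile(dialect=sqlite.dialect()))
# SELECT demo.id FROM demo WHERE strftime('%Y-%m-%d', demo.ts) = ?
```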
+def substract_date(**kwargs: float) -> datetime:
+    return datetime.now(tz=timezone.utc) - timedelta(**kwargs)
+
+
+Column: partial[RawColumn] = partial(RawColumn, nullable=False)
+NullColumn: partial[RawColumn] = partial(RawColumn, nullable=True)
+DateNowColumn = partial(Column, type_=DateTime(timezone=True), server_default=utcnow())
+
+
+def EnumColumn(enum_type, **kwargs):
+    return Column(Enum(enum_type, native_enum=False, length=16), **kwargs)
+
+
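A sketch of how these column helpers combine in a model definition (the `Example` model is illustrative, not part of diracx):

```python
# Sketch: Column is NOT NULL by default, NullColumn is nullable,
# DateNowColumn carries a DB-side default of utcnow().
from sqlalchemy import Integer, String
from sqlalchemy.orm import declarative_base

ExampleBase = declarative_base()


class Example(ExampleBase):
    __tablename__ = "example"
    Id = Column(Integer, primary_key=True)  # NOT NULL by default
    Comment = NullColumn(String(255))  # explicitly nullable
    CreatedAt = DateNowColumn()  # server default of utcnow()
```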
+class EnumBackedBool(types.TypeDecorator):
+    """Maps an ``EnumBackedBool()`` column to True/False in Python."""
+
+    impl = types.Enum
+    cache_ok: bool = True
+
+    def __init__(self) -> None:
+        super().__init__("True", "False")
+
+    def process_bind_param(self, value, dialect) -> str:
+        if value is True:
+            return "True"
+        elif value is False:
+            return "False"
+        else:
+            raise NotImplementedError(value, dialect)
+
+    def process_result_value(self, value, dialect) -> bool:
+        if value == "True":
+            return True
+        elif value == "False":
+            return False
+        else:
+            raise NotImplementedError(f"Unknown {value=}")
+
+
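A sketch of the two conversion directions (the `dialect` argument is unused on these code paths, so `None` suffices for a demonstration):

```python
# Sketch: Python bools are stored as "True"/"False" enum strings.
eb = EnumBackedBool()
print(eb.process_bind_param(True, None))  # "True"  -> written to the DB
print(eb.process_result_value("False", None))  # False -> returned to Python
```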
+class SQLDBError(Exception):
+    pass
+
+
+class SQLDBUnavailable(DBUnavailable, SQLDBError):
+    """Used whenever we encounter a problem with the DB connection."""
+
+
+class BaseSQLDB(metaclass=ABCMeta):
+    """This should be the base class of all the SQL DiracX DBs.
+
+    The details covered here should be handled automatically by the service and
+    task machinery of DiracX; this documentation exists for informational
+    purposes.
+
+    The available databases are discovered by calling `BaseSQLDB.available_urls`.
+    This method returns a mapping of database names to connection URLs. The
+    available databases are determined by the `diracx.db.sql` entrypoint in the
+    `pyproject.toml` file and the connection URLs are taken from the environment
+    variables of the form `DIRACX_DB_URL_<db-name>`.
+
+    If extensions to DiracX are being used, there can be multiple implementations
+    of the same database. To list the available implementations use
+    `BaseSQLDB.available_implementations(db_name)`. The first entry in this list
+    will be the preferred implementation and it can be initialized by calling
+    its `__init__` function with a URL previously obtained from
+    `BaseSQLDB.available_urls`.
+
+    To control the lifetime of the SQLAlchemy engine used for connecting to the
+    database, which includes the connection pool, the `BaseSQLDB.engine_context`
+    asynchronous context manager should be entered. When inside this context
+    manager, the engine can be accessed with `BaseSQLDB.engine`.
+
+    Upon entering, the DB class can then be used as an asynchronous context
+    manager to enter transactions. If an exception is raised the transaction is
+    rolled back automatically. If the inner context exits peacefully, the
+    transaction is committed automatically. When inside this context manager,
+    the DB connection can be accessed with `BaseSQLDB.conn`.
+
+    For example:
+
+    ```python
+    db_name = ...
+    url = BaseSQLDB.available_urls()[db_name]
+    MyDBClass = BaseSQLDB.available_implementations(db_name)[0]
+
+    db = MyDBClass(url)
+    async with db.engine_context():
+        async with db:
+            # Do something in the first transaction
+            # Commit will be called automatically
+
+        async with db:
+            # This transaction will be rolled back due to the exception
+            raise Exception(...)
+    ```
+    """
+
+    # engine: AsyncEngine
+    # TODO: Make metadata an abstract property
+    metadata: MetaData
+
+    def __init__(self, db_url: str) -> None:
+        # We use a ContextVar to make sure that self._conn
+        # is specific to each context, and to avoid parallel
+        # route executions overlapping
+        self._conn: ContextVar[AsyncConnection | None] = ContextVar(
+            "_conn", default=None
+        )
+        self._db_url = db_url
+        self._engine: AsyncEngine | None = None
+
+    @classmethod
+    def available_implementations(cls, db_name: str) -> list[type[BaseSQLDB]]:
+        """Return the available implementations of the DB in reverse priority order."""
+        db_classes: list[type[BaseSQLDB]] = [
+            entry_point.load()
+            for entry_point in select_from_extension(
+                group="diracx.db.sql", name=db_name
+            )
+        ]
+        if not db_classes:
+            raise NotImplementedError(f"Could not find any matches for {db_name=}")
+        return db_classes
+
+    @classmethod
+    def available_urls(cls) -> dict[str, str]:
+        """Return a dict of available database urls.
+
+        The list of available URLs is determined by environment variables
+        prefixed with ``DIRACX_DB_URL_{DB_NAME}``.
+        """
+        db_urls: dict[str, str] = {}
+        for entry_point in select_from_extension(group="diracx.db.sql"):
+            db_name = entry_point.name
+            var_name = f"DIRACX_DB_URL_{entry_point.name.upper()}"
+            if var_name in os.environ:
+                try:
+                    db_url = os.environ[var_name]
+                    if db_url == "sqlite+aiosqlite:///:memory:":
+                        db_urls[db_name] = db_url
+                    else:
+                        db_urls[db_name] = str(
+                            TypeAdapter(SqlalchemyDsn).validate_python(db_url)
+                        )
+                except Exception:
+                    logger.error("Error loading URL for %s", db_name)
+                    raise
+        return db_urls
+
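A sketch of publishing a DB URL through the environment ("JOBDB" follows the `DIRACX_DB_URL_<DB-NAME>` convention described above; the exact names depend on which `diracx.db.sql` entry points are installed):

```python
# Sketch: expose a URL for the JobDB entry point, then discover it.
import os

os.environ["DIRACX_DB_URL_JOBDB"] = "sqlite+aiosqlite:///:memory:"
print(BaseSQLDB.available_urls())  # e.g. {"JobDB": "sqlite+aiosqlite:///:memory:"}
```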
+    @classmethod
+    def transaction(cls) -> Self:
+        raise NotImplementedError("This should never be called")
+
+    @property
+    def engine(self) -> AsyncEngine:
+        """The engine to use for database operations.
+
+        It is normally not necessary to use the engine directly, unless you are
+        doing something special, like writing a test fixture that gives you a db.
+
+        Requires that the engine_context has been entered.
+        """
+        assert self._engine is not None, "engine_context must be entered"
+        return self._engine
+
+    @contextlib.asynccontextmanager
+    async def engine_context(self) -> AsyncIterator[None]:
+        """Context manager for the engine lifecycle.
+
+        This is called once at application startup (see ``lifetime_functions``).
+        """
+        assert self._engine is None, "engine_context cannot be nested"
+
+        # Set pool_recycle to 30 minutes; this prevents the problem of MySQL
+        # expiring connections after its default timeout of 60 minutes
+        engine = create_async_engine(self._db_url, pool_recycle=60 * 30)
+        self._engine = engine
+        try:
+            yield
+        finally:
+            self._engine = None
+            await engine.dispose()
+
+    @property
+    def conn(self) -> AsyncConnection:
+        if self._conn.get() is None:
+            raise RuntimeError(f"{self.__class__} was used before entering")
+        return cast(AsyncConnection, self._conn.get())
+
+    async def __aenter__(self) -> Self:
+        """Create a connection.
+
+        This is called by the Dependency mechanism (see ``db_transaction``);
+        it will create a new connection/transaction for each route call.
+        """
+        assert self._conn.get() is None, "BaseSQLDB context cannot be nested"
+        try:
+            self._conn.set(await self.engine.connect().__aenter__())
+        except Exception as e:
+            raise SQLDBUnavailable(
+                f"Cannot connect to {self.__class__.__name__}"
+            ) from e
+
+        return self
+
+    async def __aexit__(self, exc_type, exc, tb):
+        """This is called when exiting a route.
+
+        If there was no exception, the changes in the DB are committed.
+        Otherwise, they are rolled back.
+        """
+        if exc_type is None:
+            await self._conn.get().commit()
+        await self._conn.get().__aexit__(exc_type, exc, tb)
+        self._conn.set(None)
+
+    async def ping(self):
+        """Check whether the connection to the DB is still working.
+
+        We could enable ``pre_ping`` on the engine, but that would be run
+        on every query.
+        """
+        try:
+            await self.conn.scalar(select(1))
+        except OperationalError as e:
+            raise SQLDBUnavailable("Cannot ping the DB") from e
+
+
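A sketch of the intended lifecycle, assuming a concrete subclass and a reachable database (`MyDBClass` and `url` as in the class docstring above):

```python
# Sketch: open the engine, enter one transaction, and check the connection.
import asyncio


async def check(db: BaseSQLDB) -> None:
    async with db.engine_context():  # engine + pool live inside this context
        async with db:  # one transaction; commits on clean exit
            await db.ping()  # raises SQLDBUnavailable if the DB is down


# asyncio.run(check(MyDBClass(url)))
```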
+def find_time_resolution(value):
+    if isinstance(value, datetime):
+        return None, value
+    if match := re.fullmatch(
+        r"\d{4}(-\d{2}(-\d{2}(([ T])\d{2}(:\d{2}(:\d{2}(\.\d{6}Z?)?)?)?)?)?)?", value
+    ):
+        if match.group(6):
+            precision, pattern = "SECOND", r"\1-\2-\3 \4:\5:\6"
+        elif match.group(5):
+            precision, pattern = "MINUTE", r"\1-\2-\3 \4:\5"
+        elif match.group(3):
+            precision, pattern = "HOUR", r"\1-\2-\3 \4"
+        elif match.group(2):
+            precision, pattern = "DAY", r"\1-\2-\3"
+        elif match.group(1):
+            precision, pattern = "MONTH", r"\1-\2"
+        else:
+            precision, pattern = "YEAR", r"\1"
+        return (
+            precision,
+            re.sub(
+                r"^(\d{4})-?(\d{2})?-?(\d{2})?[ T]?(\d{2})?:?(\d{2})?:?(\d{2})?\.?(\d{6})?Z?$",
+                pattern,
+                value,
+            ),
+        )
+
+    raise InvalidQueryError(f"Cannot parse {value=}")
+
+
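Expected outputs of `find_time_resolution` (a precision usable by `date_trunc` plus the value normalised to that precision), per the regex above:

```python
# Sketch: input strings of increasing precision.
print(find_time_resolution("2021"))  # ('YEAR', '2021')
print(find_time_resolution("2021-01-01"))  # ('DAY', '2021-01-01')
print(find_time_resolution("2021-01-01 12:00"))  # ('MINUTE', '2021-01-01 12:00')
```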
+def apply_search_filters(column_mapping, stmt, search):
+    for query in search:
+        try:
+            column = column_mapping(query["parameter"])
+        except KeyError as e:
+            raise InvalidQueryError(f"Unknown column {query['parameter']}") from e
+
+        if isinstance(column.type, DateTime):
+            if "value" in query and isinstance(query["value"], str):
+                resolution, value = find_time_resolution(query["value"])
+                if resolution:
+                    column = date_trunc(column, time_resolution=resolution)
+                query["value"] = value
+
+            if query.get("values"):
+                resolutions, values = zip(
+                    *map(find_time_resolution, query.get("values"))
+                )
+                if len(set(resolutions)) != 1:
+                    raise InvalidQueryError(
+                        f"Cannot mix different time resolutions in {query=}"
+                    )
+                if resolution := resolutions[0]:
+                    column = date_trunc(column, time_resolution=resolution)
+                query["values"] = values
+
+        if query["operator"] == "eq":
+            expr = column == query["value"]
+        elif query["operator"] == "neq":
+            expr = column != query["value"]
+        elif query["operator"] == "gt":
+            expr = column > query["value"]
+        elif query["operator"] == "lt":
+            expr = column < query["value"]
+        elif query["operator"] == "in":
+            expr = column.in_(query["values"])
+        elif query["operator"] == "not in":
+            expr = column.notin_(query["values"])
+        elif query["operator"] == "like":
+            expr = column.like(query["value"])
+        elif query["operator"] == "ilike":
+            expr = column.ilike(query["value"])
+        else:
+            raise InvalidQueryError(f"Unknown filter {query=}")
+        stmt = stmt.where(expr)
+    return stmt
+
+
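A sketch of filtering a SELECT with the search-dict format used above (the `jobs` table is illustrative, not part of diracx):

```python
# Sketch: apply one equality filter to a statement.
from sqlalchemy import Column as SAColumn
from sqlalchemy import Integer, MetaData, String, Table, select

jobs = Table(
    "jobs", MetaData(), SAColumn("JobID", Integer), SAColumn("Status", String(32))
)
stmt = apply_search_filters(
    jobs.columns.__getitem__,  # maps a parameter name to a column, raises KeyError
    select(jobs),
    [{"parameter": "Status", "operator": "eq", "value": "Running"}],
)
print(stmt)  # ... WHERE jobs."Status" = :Status_1
```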
+def apply_sort_constraints(column_mapping, stmt, sorts):
+    sort_columns = []
+    for sort in sorts or []:
+        try:
+            column = column_mapping(sort["parameter"])
+        except KeyError as e:
+            raise InvalidQueryError(
+                f"Cannot sort by {sort['parameter']}: unknown column"
+            ) from e
+        sorted_column = None
+        if sort["direction"] == SortDirection.ASC:
+            sorted_column = column.asc()
+        elif sort["direction"] == SortDirection.DESC:
+            sorted_column = column.desc()
+        else:
+            raise InvalidQueryError(f"Unknown sort {sort['direction']=}")
+        sort_columns.append(sorted_column)
+    if sort_columns:
+        stmt = stmt.order_by(*sort_columns)
+    return stmt
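Sorting mirrors filtering; a sketch reusing the illustrative `jobs` table from the previous example:

```python
# Sketch: apply a descending sort to a statement.
from diracx.core.models import SortDirection

stmt = apply_sort_constraints(
    jobs.columns.__getitem__,
    select(jobs),
    [{"parameter": "JobID", "direction": SortDirection.DESC}],
)
print(stmt)  # ... ORDER BY jobs."JobID" DESC
```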
@@ -12,8 +12,8 @@ from diracx.core.models import (
     ScalarSearchOperator,
     SetJobStatusReturn,
 )
-from diracx.db.sql.jobs.db import JobDB, JobLoggingDB, TaskQueueDB
-from diracx.db.sql.sandbox_metadata.db import SandboxMetadataDB
+
+from .. import JobDB, JobLoggingDB, SandboxMetadataDB, TaskQueueDB
 
 
 async def set_job_status(
@@ -26,11 +26,10 @@ async def set_job_status(
     """Set various status fields for job specified by its jobId.
     Set only the last status in the JobDB, updating all the status
     logging information in the JobLoggingDB. The status dict has datetime
-    as a key and status information dictionary as values
+    as a key and status information dictionary as values.
 
     :raises: JobNotFound if the job is not found in one of the DBs
     """
-
     from DIRAC.Core.Utilities import TimeUtilities
     from DIRAC.Core.Utilities.ReturnValues import returnValueOrRaise
     from DIRAC.WorkloadManagementSystem.Utilities.JobStatusUtility import (
@@ -160,11 +159,10 @@ async def delete_jobs(
     task_queue_db: TaskQueueDB,
     background_task: BackgroundTasks,
 ):
-    """
-    "Delete" jobs by removing them from the task queues, set kill as a job command setting the job status to DELETED.
-    :raises: BaseExceptionGroup[JobNotFound] for every job that was not found
-    """
+    """Remove the jobs from the task queues, send a kill command and set the job status to DELETED.
 
+    :raises: BaseExceptionGroup[JobNotFound] for every job that was not found.
+    """
     await _remove_jobs_from_task_queue(job_ids, config, task_queue_db, background_task)
     # TODO: implement StorageManagerClient
     # returnValueOrRaise(StorageManagerClient().killTasksBySourceTaskID(job_ids))
@@ -198,9 +196,8 @@ async def kill_jobs(
     task_queue_db: TaskQueueDB,
     background_task: BackgroundTasks,
 ):
-    """
-    Kill jobs by removing them from the task queues, set kill as a job command and setting the job status to KILLED.
-    :raises: BaseExceptionGroup[JobNotFound] for every job that was not found
+    """Kill jobs by removing them from the task queues, sending a kill command and setting the job status to KILLED.
+    :raises: BaseExceptionGroup[JobNotFound] for every job that was not found.
     """
     await _remove_jobs_from_task_queue(job_ids, config, task_queue_db, background_task)
     # TODO: implement StorageManagerClient
@@ -259,11 +256,9 @@ async def remove_jobs(
     task_queue_db: TaskQueueDB,
     background_task: BackgroundTasks,
 ):
+    """Fully remove a job from the WMS databases.
+    :raises: nothing.
     """
-    Fully remove a job from the WMS databases.
-    :raises: nothing
-    """
-
     # Remove the staging task from the StorageManager
     # TODO: this was not done in the JobManagerHandler, but it was done in the kill method
     # I think it should be done here too
@@ -290,9 +285,7 @@ async def _remove_jobs_from_task_queue(
     task_queue_db: TaskQueueDB,
     background_task: BackgroundTasks,
 ):
-    """
-    Remove the job from TaskQueueDB
-    """
+    """Remove the job from TaskQueueDB."""
     tq_infos = await task_queue_db.get_tq_infos_for_jobs(job_ids)
     await task_queue_db.remove_jobs(job_ids)
     for tq_id, owner, owner_group, vo in tq_infos:
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: diracx-db
-Version: 0.0.1a17
+Version: 0.0.1a19
 Summary: TODO
 License: GPL-3.0-only
 Classifier: Intended Audience :: Science/Research
@@ -8,14 +8,14 @@ Classifier: License :: OSI Approved :: GNU General Public License v3 (GPLv3)
 Classifier: Programming Language :: Python :: 3
 Classifier: Topic :: Scientific/Engineering
 Classifier: Topic :: System :: Distributed Computing
-Requires-Python: >=3.10
+Requires-Python: >=3.11
 Description-Content-Type: text/markdown
 Requires-Dist: dirac
 Requires-Dist: diracx-core
 Requires-Dist: fastapi
 Requires-Dist: opensearch-py[async]
-Requires-Dist: pydantic >=2.4
-Requires-Dist: sqlalchemy[aiomysql,aiosqlite] >=2
+Requires-Dist: pydantic>=2.4
+Requires-Dist: sqlalchemy[aiomysql,aiosqlite]>=2
 Provides-Extra: testing
-Requires-Dist: diracx-testing ; extra == 'testing'
+Requires-Dist: diracx-testing; extra == "testing"
 
@@ -0,0 +1,36 @@
+diracx/db/__init__.py,sha256=2oeUeVwZq53bo_ZOflEYZsBn7tcR5Tzb2AIu0TAWELM,109
+diracx/db/__main__.py,sha256=tU4tp3OAClYCiPMxlRj524sZGBx9oy4CoWHd8pMuEEs,1715
+diracx/db/exceptions.py,sha256=-LSkEwsvjwU7vXqx-xeLvLKInTRAhjwB7K_AKfQcIH8,41
+diracx/db/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+diracx/db/os/__init__.py,sha256=IZr6z6SefrRvuC8sTC4RmB3_wwOyEt1GzpDuwSMH8O4,112
+diracx/db/os/job_parameters.py,sha256=Knca19uT2G-5FI7MOFlaOAXeHn4ecPVLIH30TiwhaTw,858
+diracx/db/os/utils.py,sha256=9UyhgMqaI8loh8chW2zHW-9JAOtH5YfktC-d-uY5Wnk,11346
+diracx/db/sql/__init__.py,sha256=JYu0b0IVhoXy3lX2m2r2dmAjsRS7IbECBUMEDvX0Te4,391
+diracx/db/sql/auth/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+diracx/db/sql/auth/db.py,sha256=Wi4oeHCL4pPiaysZEx8R0KNk9BDxncAAtOD2qnD-NnY,10206
+diracx/db/sql/auth/schema.py,sha256=W5whp1ZK_SNt-wxWVRBegmrc9IgqCR1LFY1FWwUlEBs,2828
+diracx/db/sql/dummy/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+diracx/db/sql/dummy/db.py,sha256=4Xyo7gUh_5b6Q2a_ggJG6e7fCtc9HrP_BRXfKGfqZIs,1642
+diracx/db/sql/dummy/schema.py,sha256=uEkGDNVZbmJecytkHY1CO-M1MiKxe5w1_h0joJMPC9E,680
+diracx/db/sql/job/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+diracx/db/sql/job/db.py,sha256=DaU1SGeXl7TqX1QxT1RldCeMIOYGnzNwkqBrwgGE90A,16248
+diracx/db/sql/job/schema.py,sha256=w9Ht9LyVK-fB5T9-hYGsqifzneeG2YP123j1-Mx8Xio,4283
+diracx/db/sql/job_logging/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+diracx/db/sql/job_logging/db.py,sha256=OGVHYIDcWhGYVfHacsz9DEPSoJ7aRbKVoQOyCCj8XvU,5036
+diracx/db/sql/job_logging/schema.py,sha256=dD2arl-6bffeK8INT6tZ1HWEpJuYTx2iNiVzswVXXF8,812
+diracx/db/sql/pilot_agents/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+diracx/db/sql/pilot_agents/db.py,sha256=7-cuCbh_KhM0jlybsHMWV-W66bHsPHIVBpbuqwjncj0,1232
+diracx/db/sql/pilot_agents/schema.py,sha256=d4sVlhWpMVgHguS-VlDR3zqM0Yj5QVEjon3gBgrr-kk,2091
+diracx/db/sql/sandbox_metadata/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+diracx/db/sql/sandbox_metadata/db.py,sha256=ZoiKjs-Rdd4p-kOZOLoUkpP0xGIp0oGjou3D2oP_6GE,6452
+diracx/db/sql/sandbox_metadata/schema.py,sha256=rngYYkJxBhjETBHGLD1CTipDGe44mRYR0wdaFoAJwp0,1400
+diracx/db/sql/task_queue/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+diracx/db/sql/task_queue/db.py,sha256=e6yauZO0nWaUVqjqQycH8iPO4wXLXaC82eaIq1K_KI8,9102
+diracx/db/sql/task_queue/schema.py,sha256=fvzQyCw_xWAOWTLW6Qrp1m-WzEKb0tlYmafoLTbCy1I,3222
+diracx/db/sql/utils/__init__.py,sha256=8YcDra_P_FL3QIDzl53HvN1tHUE396F0tGKB-TPQxeM,15541
+diracx/db/sql/utils/job_status.py,sha256=GNQTKiyguhnB348mLIB7BT-PEOEKpKljR4JzvOd_h8M,10414
+diracx_db-0.0.1a19.dist-info/METADATA,sha256=_e0zgiSjl92T83BUpBXkEHzMeCcLcr4L_sE9h9v_iwQ,688
+diracx_db-0.0.1a19.dist-info/WHEEL,sha256=GV9aMThwP_4oNCtvEC2ec3qUYutgWeAzklro_0m4WJQ,91
+diracx_db-0.0.1a19.dist-info/entry_points.txt,sha256=YLI4f6640bri8Ud6Jt9WNq79pSTVQAkfUasb9f75fR8,315
+diracx_db-0.0.1a19.dist-info/top_level.txt,sha256=vJx10tdRlBX3rF2Psgk5jlwVGZNcL3m_7iQWwgPXt-U,7
+diracx_db-0.0.1a19.dist-info/RECORD,,