diracx-db 0.0.1a17__py3-none-any.whl → 0.0.1a18__py3-none-any.whl
- diracx/db/os/utils.py +60 -11
- diracx/db/sql/__init__.py +3 -1
- diracx/db/sql/auth/db.py +10 -19
- diracx/db/sql/auth/schema.py +5 -7
- diracx/db/sql/dummy/db.py +2 -3
- diracx/db/sql/{jobs → job}/db.py +12 -452
- diracx/db/sql/{jobs → job}/schema.py +2 -118
- diracx/db/sql/job_logging/__init__.py +0 -0
- diracx/db/sql/job_logging/db.py +161 -0
- diracx/db/sql/job_logging/schema.py +25 -0
- diracx/db/sql/sandbox_metadata/db.py +12 -10
- diracx/db/sql/task_queue/__init__.py +0 -0
- diracx/db/sql/task_queue/db.py +261 -0
- diracx/db/sql/task_queue/schema.py +109 -0
- diracx/db/sql/utils/__init__.py +418 -0
- diracx/db/sql/{jobs/status_utility.py → utils/job_status.py} +11 -18
- {diracx_db-0.0.1a17.dist-info → diracx_db-0.0.1a18.dist-info}/METADATA +5 -5
- diracx_db-0.0.1a18.dist-info/RECORD +33 -0
- {diracx_db-0.0.1a17.dist-info → diracx_db-0.0.1a18.dist-info}/WHEEL +1 -1
- diracx/db/sql/utils.py +0 -236
- diracx_db-0.0.1a17.dist-info/RECORD +0 -27
- /diracx/db/sql/{jobs → job}/__init__.py +0 -0
- {diracx_db-0.0.1a17.dist-info → diracx_db-0.0.1a18.dist-info}/entry_points.txt +0 -0
- {diracx_db-0.0.1a17.dist-info → diracx_db-0.0.1a18.dist-info}/top_level.txt +0 -0
diracx/db/sql/utils/__init__.py
@@ -0,0 +1,418 @@
+from __future__ import annotations
+
+__all__ = ("utcnow", "Column", "NullColumn", "DateNowColumn", "BaseSQLDB")
+
+import contextlib
+import logging
+import os
+import re
+from abc import ABCMeta
+from collections.abc import AsyncIterator
+from contextvars import ContextVar
+from datetime import datetime, timedelta, timezone
+from functools import partial
+from typing import TYPE_CHECKING, Self, cast
+
+from pydantic import TypeAdapter
+from sqlalchemy import Column as RawColumn
+from sqlalchemy import DateTime, Enum, MetaData, select
+from sqlalchemy.exc import OperationalError
+from sqlalchemy.ext.asyncio import AsyncConnection, AsyncEngine, create_async_engine
+from sqlalchemy.ext.compiler import compiles
+from sqlalchemy.sql import expression
+
+from diracx.core.exceptions import InvalidQueryError
+from diracx.core.extensions import select_from_extension
+from diracx.core.models import SortDirection
+from diracx.core.settings import SqlalchemyDsn
+from diracx.db.exceptions import DBUnavailable
+
+if TYPE_CHECKING:
+    from sqlalchemy.types import TypeEngine
+
+logger = logging.getLogger(__name__)
+
+
+class utcnow(expression.FunctionElement):
+    type: TypeEngine = DateTime()
+    inherit_cache: bool = True
+
+
+@compiles(utcnow, "postgresql")
+def pg_utcnow(element, compiler, **kw) -> str:
+    return "TIMEZONE('utc', CURRENT_TIMESTAMP)"
+
+
+@compiles(utcnow, "mssql")
+def ms_utcnow(element, compiler, **kw) -> str:
+    return "GETUTCDATE()"
+
+
+@compiles(utcnow, "mysql")
+def mysql_utcnow(element, compiler, **kw) -> str:
+    return "(UTC_TIMESTAMP)"
+
+
+@compiles(utcnow, "sqlite")
+def sqlite_utcnow(element, compiler, **kw) -> str:
+    return "DATETIME('now')"
+
+
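The `utcnow` construct above dispatches to a dialect-specific SQL expression through SQLAlchemy's `@compiles` hook. A minimal sketch of how it can serve as a server-side column default (the `Owners` table is invented for illustration):

```python
from sqlalchemy import Column, DateTime, Integer, MetaData, Table, insert

from diracx.db.sql.utils import utcnow

metadata = MetaData()
owners = Table(
    "Owners",
    metadata,
    Column("OwnerID", Integer, primary_key=True),
    # Rendered per dialect: DATETIME('now') on SQLite, GETUTCDATE() on MSSQL, etc.
    Column("CreationTime", DateTime, server_default=utcnow()),
)

# The default is evaluated by the database server at INSERT time.
stmt = insert(owners).values(OwnerID=1)
```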
+class date_trunc(expression.FunctionElement):
+    """Sqlalchemy function to truncate a date to a given resolution.
+
+    Primarily used to be able to query for a specific resolution of a date e.g.
+
+        select * from table where date_trunc('day', date_column) = '2021-01-01'
+        select * from table where date_trunc('year', date_column) = '2021'
+        select * from table where date_trunc('minute', date_column) = '2021-01-01 12:00'
+    """
+
+    type = DateTime()
+    inherit_cache = True
+
+    def __init__(self, *args, time_resolution, **kwargs) -> None:
+        super().__init__(*args, **kwargs)
+        self._time_resolution = time_resolution
+
+
+@compiles(date_trunc, "postgresql")
+def pg_date_trunc(element, compiler, **kw):
+    res = {
+        "SECOND": "second",
+        "MINUTE": "minute",
+        "HOUR": "hour",
+        "DAY": "day",
+        "MONTH": "month",
+        "YEAR": "year",
+    }[element._time_resolution]
+    return f"date_trunc('{res}', {compiler.process(element.clauses)})"
+
+
+@compiles(date_trunc, "mysql")
+def mysql_date_trunc(element, compiler, **kw):
+    pattern = {
+        "SECOND": "%Y-%m-%d %H:%i:%S",
+        "MINUTE": "%Y-%m-%d %H:%i",
+        "HOUR": "%Y-%m-%d %H",
+        "DAY": "%Y-%m-%d",
+        "MONTH": "%Y-%m",
+        "YEAR": "%Y",
+    }[element._time_resolution]
+    return f"DATE_FORMAT({compiler.process(element.clauses)}, '{pattern}')"
+
+
+@compiles(date_trunc, "sqlite")
+def sqlite_date_trunc(element, compiler, **kw):
+    pattern = {
+        "SECOND": "%Y-%m-%d %H:%M:%S",
+        "MINUTE": "%Y-%m-%d %H:%M",
+        "HOUR": "%Y-%m-%d %H",
+        "DAY": "%Y-%m-%d",
+        "MONTH": "%Y-%m",
+        "YEAR": "%Y",
+    }[element._time_resolution]
+    return f"strftime('{pattern}', {compiler.process(element.clauses)})"
+
+
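Because `date_trunc` carries its `time_resolution` through to the compiler hooks, the same expression renders as `date_trunc(...)` on PostgreSQL, `DATE_FORMAT(...)` on MySQL and `strftime(...)` on SQLite. A sketch, with a made-up `Jobs` table:

```python
from sqlalchemy import Column, DateTime, Integer, MetaData, Table, select

from diracx.db.sql.utils import date_trunc

metadata = MetaData()
jobs = Table(
    "Jobs",
    metadata,
    Column("JobID", Integer, primary_key=True),
    Column("SubmissionTime", DateTime),
)

# Compare only the day part of SubmissionTime, whatever the backend.
stmt = select(jobs).where(
    date_trunc(jobs.c.SubmissionTime, time_resolution="DAY") == "2021-01-01"
)
```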
+def substract_date(**kwargs: float) -> datetime:
+    return datetime.now(tz=timezone.utc) - timedelta(**kwargs)
+
+
+Column: partial[RawColumn] = partial(RawColumn, nullable=False)
+NullColumn: partial[RawColumn] = partial(RawColumn, nullable=True)
+DateNowColumn = partial(Column, type_=DateTime(timezone=True), server_default=utcnow())
+
+
+def EnumColumn(enum_type, **kwargs):
+    return Column(Enum(enum_type, native_enum=False, length=16), **kwargs)
+
+
+class SQLDBError(Exception):
+    pass
+
+
+class SQLDBUnavailable(DBUnavailable, SQLDBError):
+    """Used whenever we encounter a problem with the DB connection."""
+
+
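These `partial`-based helpers make columns non-nullable by default. A short sketch of how a schema module can use them (the table and enum are invented for the example):

```python
from enum import Enum

from sqlalchemy import Integer, String
from sqlalchemy.orm import declarative_base

from diracx.db.sql.utils import Column, DateNowColumn, EnumColumn, NullColumn

Base = declarative_base()


class SandboxStatus(Enum):
    PENDING = "Pending"
    DONE = "Done"


class Sandboxes(Base):
    __tablename__ = "Sandboxes"
    SBId = Column(Integer, primary_key=True)  # NOT NULL by default
    Description = NullColumn(String(255))  # explicitly nullable
    CreationTime = DateNowColumn()  # server-side UTC default via utcnow()
    Status = EnumColumn(SandboxStatus)  # stored as VARCHAR(16), not a native enum
```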
+class BaseSQLDB(metaclass=ABCMeta):
+    """This should be the base class of all the SQL DiracX DBs.
+
+    The details covered here should be handled automatically by the service and
+    task machinery of DiracX and this documentation exists for informational
+    purposes.
+
+    The available databases are discovered by calling `BaseSQLDB.available_urls`.
+    This method returns a mapping of database names to connection URLs. The
+    available databases are determined by the `diracx.db.sql` entrypoint in the
+    `pyproject.toml` file and the connection URLs are taken from the environment
+    variables of the form `DIRACX_DB_URL_<db-name>`.
+
+    If extensions to DiracX are being used, there can be multiple implementations
+    of the same database. To list the available implementations use
+    `BaseSQLDB.available_implementations(db_name)`. The first entry in this list
+    will be the preferred implementation and it can be initialized by calling
+    its `__init__` function with a URL previously obtained from
+    `BaseSQLDB.available_urls`.
+
+    To control the lifetime of the SQLAlchemy engine used for connecting to the
+    database, which includes the connection pool, the `BaseSQLDB.engine_context`
+    asynchronous context manager should be entered. When inside this context
+    manager, the engine can be accessed with `BaseSQLDB.engine`.
+
+    Upon entering, the DB class can then be used as an asynchronous context
+    manager to enter transactions. If an exception is raised the transaction is
+    rolled back automatically. If the inner context exits peacefully, the
+    transaction is committed automatically. When inside this context manager,
+    the DB connection can be accessed with `BaseSQLDB.conn`.
+
+    For example:
+
+    ```python
+    db_name = ...
+    url = BaseSQLDB.available_urls()[db_name]
+    MyDBClass = BaseSQLDB.available_implementations(db_name)[0]
+
+    db = MyDBClass(url)
+    async with db.engine_context():
+        async with db:
+            # Do something in the first transaction
+            # Commit will be called automatically
+
+        async with db:
+            # This transaction will be rolled back due to the exception
+            raise Exception(...)
+    ```
+    """
+
+    # engine: AsyncEngine
+    # TODO: Make metadata an abstract property
+    metadata: MetaData
+
+    def __init__(self, db_url: str) -> None:
+        # We use a ContextVar to make sure that self._conn
+        # is specific to each context, and avoid parallel
+        # route executions overlapping
+        self._conn: ContextVar[AsyncConnection | None] = ContextVar(
+            "_conn", default=None
+        )
+        self._db_url = db_url
+        self._engine: AsyncEngine | None = None
+
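The `ContextVar` in `__init__` is what keeps concurrent route executions from sharing a connection: each asyncio task sees its own value. A standalone sketch of the pattern (not diracx code):

```python
import asyncio
from contextvars import ContextVar

_conn: ContextVar[str | None] = ContextVar("_conn", default=None)


async def handler(name: str) -> None:
    # Each task created by gather() runs in a copy of the current context,
    # so set() here is invisible to the other "request".
    _conn.set(f"connection-for-{name}")
    await asyncio.sleep(0)  # yield control to the other task
    assert _conn.get() == f"connection-for-{name}"


async def main() -> None:
    await asyncio.gather(handler("a"), handler("b"))


asyncio.run(main())
```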
+    @classmethod
+    def available_implementations(cls, db_name: str) -> list[type[BaseSQLDB]]:
+        """Return the available implementations of the DB in reverse priority order."""
+        db_classes: list[type[BaseSQLDB]] = [
+            entry_point.load()
+            for entry_point in select_from_extension(
+                group="diracx.db.sql", name=db_name
+            )
+        ]
+        if not db_classes:
+            raise NotImplementedError(f"Could not find any matches for {db_name=}")
+        return db_classes
+
+    @classmethod
+    def available_urls(cls) -> dict[str, str]:
+        """Return a dict of available database urls.
+
+        The list of available URLs is determined by environment variables
+        prefixed with ``DIRACX_DB_URL_{DB_NAME}``.
+        """
+        db_urls: dict[str, str] = {}
+        for entry_point in select_from_extension(group="diracx.db.sql"):
+            db_name = entry_point.name
+            var_name = f"DIRACX_DB_URL_{entry_point.name.upper()}"
+            if var_name in os.environ:
+                try:
+                    db_url = os.environ[var_name]
+                    if db_url == "sqlite+aiosqlite:///:memory:":
+                        db_urls[db_name] = db_url
+                    else:
+                        db_urls[db_name] = str(
+                            TypeAdapter(SqlalchemyDsn).validate_python(db_url)
+                        )
+                except Exception:
+                    logger.error("Error loading URL for %s", db_name)
+                    raise
+        return db_urls
+
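Concretely, URL discovery is driven purely by the environment. A sketch assuming the standard `JobDB` entry point name is installed:

```python
import os

from diracx.db.sql.utils import BaseSQLDB

# The in-memory SQLite URL is passed through unvalidated (see above),
# which is what test setups rely on.
os.environ["DIRACX_DB_URL_JOBDB"] = "sqlite+aiosqlite:///:memory:"

urls = BaseSQLDB.available_urls()
assert urls["JobDB"] == "sqlite+aiosqlite:///:memory:"
```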
+    @classmethod
+    def transaction(cls) -> Self:
+        raise NotImplementedError("This should never be called")
+
+    @property
+    def engine(self) -> AsyncEngine:
+        """The engine to use for database operations.
+
+        It is normally not necessary to use the engine directly, unless you are
+        doing something special, like writing a test fixture that gives you a db.
+
+        Requires that the engine_context has been entered.
+        """
+        assert self._engine is not None, "engine_context must be entered"
+        return self._engine
+
+    @contextlib.asynccontextmanager
+    async def engine_context(self) -> AsyncIterator[None]:
+        """Context manager to manage the engine lifecycle.
+
+        This is called once at the application startup (see ``lifetime_functions``).
+        """
+        assert self._engine is None, "engine_context cannot be nested"
+
+        # Set the pool_recycle to 30 minutes
+        # That should prevent the problem of MySQL expiring connections
+        # after 60 minutes by default
+        engine = create_async_engine(self._db_url, pool_recycle=60 * 30)
+        self._engine = engine
+        try:
+            yield
+        finally:
+            self._engine = None
+            await engine.dispose()
+
+    @property
+    def conn(self) -> AsyncConnection:
+        if self._conn.get() is None:
+            raise RuntimeError(f"{self.__class__} was used before entering")
+        return cast(AsyncConnection, self._conn.get())
+
+    async def __aenter__(self) -> Self:
+        """Create a connection.
+
+        This is called by the Dependency mechanism (see ``db_transaction``);
+        it will create a new connection/transaction for each route call.
+        """
+        assert self._conn.get() is None, "BaseSQLDB context cannot be nested"
+        try:
+            self._conn.set(await self.engine.connect().__aenter__())
+        except Exception as e:
+            raise SQLDBUnavailable(
+                f"Cannot connect to {self.__class__.__name__}"
+            ) from e
+
+        return self
+
+    async def __aexit__(self, exc_type, exc, tb):
+        """This is called when exiting a route.
+
+        If there was no exception, the changes in the DB are committed.
+        Otherwise, they are rolled back.
+        """
+        if exc_type is None:
+            await self._conn.get().commit()
+        await self._conn.get().__aexit__(exc_type, exc, tb)
+        self._conn.set(None)
+
+    async def ping(self):
+        """Check whether the connection to the DB is still working.
+
+        We could enable the ``pre_ping`` in the engine, but this would be run
+        at every query.
+        """
+        try:
+            await self.conn.scalar(select(1))
+        except OperationalError as e:
+            raise SQLDBUnavailable("Cannot ping the DB") from e
+
+
+def find_time_resolution(value):
+    if isinstance(value, datetime):
+        return None, value
+    if match := re.fullmatch(
+        r"\d{4}(-\d{2}(-\d{2}(([ T])\d{2}(:\d{2}(:\d{2}(\.\d{6}Z?)?)?)?)?)?)?", value
+    ):
+        if match.group(6):
+            precision, pattern = "SECOND", r"\1-\2-\3 \4:\5:\6"
+        elif match.group(5):
+            precision, pattern = "MINUTE", r"\1-\2-\3 \4:\5"
+        elif match.group(3):
+            precision, pattern = "HOUR", r"\1-\2-\3 \4"
+        elif match.group(2):
+            precision, pattern = "DAY", r"\1-\2-\3"
+        elif match.group(1):
+            precision, pattern = "MONTH", r"\1-\2"
+        else:
+            precision, pattern = "YEAR", r"\1"
+        return (
+            precision,
+            re.sub(
+                r"^(\d{4})-?(\d{2})?-?(\d{2})?[ T]?(\d{2})?:?(\d{2})?:?(\d{2})?\.?(\d{6})?Z?$",
+                pattern,
+                value,
+            ),
+        )
+
+    raise InvalidQueryError(f"Cannot parse {value=}")
+
+
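The regex groups map directly onto the resolution ladder; the expected behaviour, for illustration:

```python
from datetime import datetime

from diracx.db.sql.utils import find_time_resolution

find_time_resolution("2021")                # ("YEAR", "2021")
find_time_resolution("2021-01")             # ("MONTH", "2021-01")
find_time_resolution("2021-01-02T03:04")    # ("MINUTE", "2021-01-02 03:04")
find_time_resolution(datetime(2021, 1, 2))  # (None, datetime(2021, 1, 2))
find_time_resolution("01-02-2021")          # raises InvalidQueryError
```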
+def apply_search_filters(column_mapping, stmt, search):
+    for query in search:
+        try:
+            column = column_mapping(query["parameter"])
+        except KeyError as e:
+            raise InvalidQueryError(f"Unknown column {query['parameter']}") from e
+
+        if isinstance(column.type, DateTime):
+            if "value" in query and isinstance(query["value"], str):
+                resolution, value = find_time_resolution(query["value"])
+                if resolution:
+                    column = date_trunc(column, time_resolution=resolution)
+                query["value"] = value
+
+            if query.get("values"):
+                resolutions, values = zip(
+                    *map(find_time_resolution, query.get("values"))
+                )
+                if len(set(resolutions)) != 1:
+                    raise InvalidQueryError(
+                        f"Cannot mix different time resolutions in {query=}"
+                    )
+                if resolution := resolutions[0]:
+                    column = date_trunc(column, time_resolution=resolution)
+                query["values"] = values
+
+        if query["operator"] == "eq":
+            expr = column == query["value"]
+        elif query["operator"] == "neq":
+            expr = column != query["value"]
+        elif query["operator"] == "gt":
+            expr = column > query["value"]
+        elif query["operator"] == "lt":
+            expr = column < query["value"]
+        elif query["operator"] == "in":
+            expr = column.in_(query["values"])
+        elif query["operator"] == "not in":
+            expr = column.notin_(query["values"])
+        elif query["operator"] == "like":
+            expr = column.like(query["value"])
+        elif query["operator"] == "ilike":
+            expr = column.ilike(query["value"])
+        else:
+            raise InvalidQueryError(f"Unknown filter {query=}")
+        stmt = stmt.where(expr)
+    return stmt
+
+
+def apply_sort_constraints(column_mapping, stmt, sorts):
+    sort_columns = []
+    for sort in sorts or []:
+        try:
+            column = column_mapping(sort["parameter"])
+        except KeyError as e:
+            raise InvalidQueryError(
+                f"Cannot sort by {sort['parameter']}: unknown column"
+            ) from e
+        sorted_column = None
+        if sort["direction"] == SortDirection.ASC:
+            sorted_column = column.asc()
+        elif sort["direction"] == SortDirection.DESC:
+            sorted_column = column.desc()
+        else:
+            raise InvalidQueryError(f"Unknown sort {sort['direction']=}")
+        sort_columns.append(sorted_column)
+    if sort_columns:
+        stmt = stmt.order_by(*sort_columns)
+    return stmt
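A sketch of how the two helpers compose into one query. The `Jobs` table and the `column_mapping` callable are assumptions for the example; in diracx the mapping comes from the DB class:

```python
from sqlalchemy import Column, DateTime, Integer, MetaData, String, Table, select

from diracx.core.models import SortDirection
from diracx.db.sql.utils import apply_search_filters, apply_sort_constraints

metadata = MetaData()
jobs = Table(
    "Jobs",
    metadata,
    Column("JobID", Integer, primary_key=True),
    Column("Status", String(32)),
    Column("SubmissionTime", DateTime),
)

stmt = select(jobs)
# Unknown parameters raise InvalidQueryError via the KeyError handler.
stmt = apply_search_filters(
    jobs.columns.__getitem__,
    stmt,
    [
        {"parameter": "Status", "operator": "eq", "value": "Running"},
        # A truncated timestamp triggers the date_trunc path above.
        {"parameter": "SubmissionTime", "operator": "eq", "value": "2021-01"},
    ],
)
stmt = apply_sort_constraints(
    jobs.columns.__getitem__,
    stmt,
    [{"parameter": "JobID", "direction": SortDirection.DESC}],
)
```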
diracx/db/sql/{jobs/status_utility.py → utils/job_status.py}
@@ -12,8 +12,8 @@ from diracx.core.models import (
     ScalarSearchOperator,
     SetJobStatusReturn,
 )
-
-from
+
+from .. import JobDB, JobLoggingDB, SandboxMetadataDB, TaskQueueDB
 
 
 async def set_job_status(
@@ -26,11 +26,10 @@ async def set_job_status(
     """Set various status fields for job specified by its jobId.
     Set only the last status in the JobDB, updating all the status
     logging information in the JobLoggingDB. The status dict has datetime
-    as a key and status information dictionary as values
+    as a key and status information dictionary as values.
 
     :raises: JobNotFound if the job is not found in one of the DBs
     """
-
     from DIRAC.Core.Utilities import TimeUtilities
     from DIRAC.Core.Utilities.ReturnValues import returnValueOrRaise
     from DIRAC.WorkloadManagementSystem.Utilities.JobStatusUtility import (
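For reference, the `status` mapping described by this docstring pairs timestamps with update dictionaries. An illustrative shape only; the field names are assumptions based on the docstring and should be checked against `diracx.core.models`:

```python
from datetime import datetime, timezone

# Hypothetical update payload: keys are timestamps, values describe the status.
status_updates = {
    datetime.now(tz=timezone.utc): {
        "Status": "Running",
        "MinorStatus": "Application",
        "Source": "JobWrapper",
    }
}
```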
@@ -160,11 +159,10 @@ async def delete_jobs(
     task_queue_db: TaskQueueDB,
     background_task: BackgroundTasks,
 ):
-    """
-    "Delete" jobs by removing them from the task queues, set kill as a job command setting the job status to DELETED.
-    :raises: BaseExceptionGroup[JobNotFound] for every job that was not found
-    """
+    """Remove jobs from the task queues, send a kill command and set the job status to DELETED.
 
+    :raises: BaseExceptionGroup[JobNotFound] for every job that was not found.
+    """
     await _remove_jobs_from_task_queue(job_ids, config, task_queue_db, background_task)
     # TODO: implement StorageManagerClient
     # returnValueOrRaise(StorageManagerClient().killTasksBySourceTaskID(job_ids))
@@ -198,9 +196,8 @@ async def kill_jobs(
     task_queue_db: TaskQueueDB,
     background_task: BackgroundTasks,
 ):
-    """
-
-    :raises: BaseExceptionGroup[JobNotFound] for every job that was not found
+    """Kill jobs by removing them from the task queues, sending a kill command and setting the job status to KILLED.
+    :raises: BaseExceptionGroup[JobNotFound] for every job that was not found.
     """
     await _remove_jobs_from_task_queue(job_ids, config, task_queue_db, background_task)
     # TODO: implement StorageManagerClient
@@ -259,11 +256,9 @@ async def remove_jobs(
     task_queue_db: TaskQueueDB,
     background_task: BackgroundTasks,
 ):
+    """Fully remove a job from the WMS databases.
+    :raises: nothing.
     """
-    Fully remove a job from the WMS databases.
-    :raises: nothing
-    """
-
     # Remove the staging task from the StorageManager
     # TODO: this was not done in the JobManagerHandler, but it was done in the kill method
     # I think it should be done here too
@@ -290,9 +285,7 @@ async def _remove_jobs_from_task_queue(
     task_queue_db: TaskQueueDB,
     background_task: BackgroundTasks,
 ):
-    """
-    Remove the job from TaskQueueDB
-    """
+    """Remove the job from TaskQueueDB."""
     tq_infos = await task_queue_db.get_tq_infos_for_jobs(job_ids)
     await task_queue_db.remove_jobs(job_ids)
     for tq_id, owner, owner_group, vo in tq_infos:
@@ -1,6 +1,6 @@
|
|
1
1
|
Metadata-Version: 2.1
|
2
2
|
Name: diracx-db
|
3
|
-
Version: 0.0.
|
3
|
+
Version: 0.0.1a18
|
4
4
|
Summary: TODO
|
5
5
|
License: GPL-3.0-only
|
6
6
|
Classifier: Intended Audience :: Science/Research
|
@@ -8,14 +8,14 @@ Classifier: License :: OSI Approved :: GNU General Public License v3 (GPLv3)
|
|
8
8
|
Classifier: Programming Language :: Python :: 3
|
9
9
|
Classifier: Topic :: Scientific/Engineering
|
10
10
|
Classifier: Topic :: System :: Distributed Computing
|
11
|
-
Requires-Python: >=3.
|
11
|
+
Requires-Python: >=3.11
|
12
12
|
Description-Content-Type: text/markdown
|
13
13
|
Requires-Dist: dirac
|
14
14
|
Requires-Dist: diracx-core
|
15
15
|
Requires-Dist: fastapi
|
16
16
|
Requires-Dist: opensearch-py[async]
|
17
|
-
Requires-Dist: pydantic
|
18
|
-
Requires-Dist: sqlalchemy[aiomysql,aiosqlite]
|
17
|
+
Requires-Dist: pydantic>=2.4
|
18
|
+
Requires-Dist: sqlalchemy[aiomysql,aiosqlite]>=2
|
19
19
|
Provides-Extra: testing
|
20
|
-
Requires-Dist: diracx-testing
|
20
|
+
Requires-Dist: diracx-testing; extra == "testing"
|
21
21
|
|
diracx_db-0.0.1a18.dist-info/RECORD
@@ -0,0 +1,33 @@
+diracx/db/__init__.py,sha256=2oeUeVwZq53bo_ZOflEYZsBn7tcR5Tzb2AIu0TAWELM,109
+diracx/db/__main__.py,sha256=3yaUP1ig-yaPSQM4wy6CtSXXHivQg-hIz2FeBt7joBc,1714
+diracx/db/exceptions.py,sha256=-LSkEwsvjwU7vXqx-xeLvLKInTRAhjwB7K_AKfQcIH8,41
+diracx/db/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+diracx/db/os/__init__.py,sha256=IZr6z6SefrRvuC8sTC4RmB3_wwOyEt1GzpDuwSMH8O4,112
+diracx/db/os/job_parameters.py,sha256=Knca19uT2G-5FI7MOFlaOAXeHn4ecPVLIH30TiwhaTw,858
+diracx/db/os/utils.py,sha256=9UyhgMqaI8loh8chW2zHW-9JAOtH5YfktC-d-uY5Wnk,11346
+diracx/db/sql/__init__.py,sha256=oJKlsWofwDMinJ5dkRy6BuP1IuVXgrmQ5HHBLpLf7no,304
+diracx/db/sql/auth/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+diracx/db/sql/auth/db.py,sha256=Wi4oeHCL4pPiaysZEx8R0KNk9BDxncAAtOD2qnD-NnY,10206
+diracx/db/sql/auth/schema.py,sha256=W5whp1ZK_SNt-wxWVRBegmrc9IgqCR1LFY1FWwUlEBs,2828
+diracx/db/sql/dummy/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+diracx/db/sql/dummy/db.py,sha256=4Xyo7gUh_5b6Q2a_ggJG6e7fCtc9HrP_BRXfKGfqZIs,1642
+diracx/db/sql/dummy/schema.py,sha256=uEkGDNVZbmJecytkHY1CO-M1MiKxe5w1_h0joJMPC9E,680
+diracx/db/sql/job/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+diracx/db/sql/job/db.py,sha256=DaU1SGeXl7TqX1QxT1RldCeMIOYGnzNwkqBrwgGE90A,16248
+diracx/db/sql/job/schema.py,sha256=3FKVc8BlPg08vAALcFS2HPGGu7QExlBuPIDOqEnBNvI,5519
+diracx/db/sql/job_logging/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+diracx/db/sql/job_logging/db.py,sha256=OGVHYIDcWhGYVfHacsz9DEPSoJ7aRbKVoQOyCCj8XvU,5036
+diracx/db/sql/job_logging/schema.py,sha256=dD2arl-6bffeK8INT6tZ1HWEpJuYTx2iNiVzswVXXF8,812
+diracx/db/sql/sandbox_metadata/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+diracx/db/sql/sandbox_metadata/db.py,sha256=ZoiKjs-Rdd4p-kOZOLoUkpP0xGIp0oGjou3D2oP_6GE,6452
+diracx/db/sql/sandbox_metadata/schema.py,sha256=rngYYkJxBhjETBHGLD1CTipDGe44mRYR0wdaFoAJwp0,1400
+diracx/db/sql/task_queue/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+diracx/db/sql/task_queue/db.py,sha256=e6yauZO0nWaUVqjqQycH8iPO4wXLXaC82eaIq1K_KI8,9102
+diracx/db/sql/task_queue/schema.py,sha256=fvzQyCw_xWAOWTLW6Qrp1m-WzEKb0tlYmafoLTbCy1I,3222
+diracx/db/sql/utils/__init__.py,sha256=pS0886KOZZvGDurQU4peJ4RGSCMH9umCx95qp66OSpY,14793
+diracx/db/sql/utils/job_status.py,sha256=GNQTKiyguhnB348mLIB7BT-PEOEKpKljR4JzvOd_h8M,10414
+diracx_db-0.0.1a18.dist-info/METADATA,sha256=5SEgwo0STgA0X_475cghDUlzvqRVnmBipxztPG4Ya8E,688
+diracx_db-0.0.1a18.dist-info/WHEEL,sha256=GV9aMThwP_4oNCtvEC2ec3qUYutgWeAzklro_0m4WJQ,91
+diracx_db-0.0.1a18.dist-info/entry_points.txt,sha256=xEFGu_zgmPgQPlUeFtdahQfQIboJ1ugFOK8eMio9gtw,271
+diracx_db-0.0.1a18.dist-info/top_level.txt,sha256=vJx10tdRlBX3rF2Psgk5jlwVGZNcL3m_7iQWwgPXt-U,7
+diracx_db-0.0.1a18.dist-info/RECORD,,