diracx-db 0.0.1a12__py3-none-any.whl → 0.0.1a13__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- diracx/db/sql/jobs/status_utility.py +1 -1
- diracx/db/sql/sandbox_metadata/db.py +85 -12
- {diracx_db-0.0.1a12.dist-info → diracx_db-0.0.1a13.dist-info}/METADATA +1 -1
- {diracx_db-0.0.1a12.dist-info → diracx_db-0.0.1a13.dist-info}/RECORD +7 -7
- {diracx_db-0.0.1a12.dist-info → diracx_db-0.0.1a13.dist-info}/WHEEL +0 -0
- {diracx_db-0.0.1a12.dist-info → diracx_db-0.0.1a13.dist-info}/entry_points.txt +0 -0
- {diracx_db-0.0.1a12.dist-info → diracx_db-0.0.1a13.dist-info}/top_level.txt +0 -0
diracx/db/sql/jobs/status_utility.py

@@ -272,7 +272,7 @@ async def remove_jobs(
 
     # TODO: this was also not done in the JobManagerHandler, but it was done in the JobCleaningAgent
     # I think it should be done here as well
-    await sandbox_metadata_db.
+    await sandbox_metadata_db.unassign_sandboxes_to_jobs(job_ids)
 
     # Remove the job from TaskQueueDB
     await _remove_jobs_from_task_queue(job_ids, config, task_queue_db, background_task)
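The added line hooks job removal into the new sandbox-mapping cleanup on SandboxMetadataDB. A minimal sketch of the call site, assuming an already-connected SandboxMetadataDB instance (as remove_jobs receives); the helper name and job IDs are made up:

    async def cleanup_sandbox_mappings(sandbox_metadata_db, job_ids: list[int]) -> None:
        # Sketch only: sandbox_metadata_db is assumed to be an open SandboxMetadataDB.
        # Dropping the Job:<id> mappings also flags sandboxes that are left with no
        # mapping as Assigned=False (see the sandbox_metadata/db.py diff below).
        await sandbox_metadata_db.unassign_sandboxes_to_jobs(job_ids)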
diracx/db/sql/sandbox_metadata/db.py

@@ -1,9 +1,10 @@
 from __future__ import annotations
 
+from typing import Any
+
 import sqlalchemy
-from sqlalchemy import delete
 
-from diracx.core.models import SandboxInfo, UserInfo
+from diracx.core.models import SandboxInfo, SandboxType, UserInfo
 from diracx.db.sql.utils import BaseSQLDB, utcnow
 
 from .schema import Base as SandboxMetadataDBBase
@@ -76,7 +77,7 @@ class SandboxMetadataDB(BaseSQLDB):
         result = await self.conn.execute(stmt)
         assert result.rowcount == 1
 
-    async def sandbox_is_assigned(self,
+    async def sandbox_is_assigned(self, pfn: str, se_name: str) -> bool:
         """Checks if a sandbox exists and has been assigned."""
         stmt: sqlalchemy.Executable = sqlalchemy.select(sb_SandBoxes.Assigned).where(
             sb_SandBoxes.SEName == se_name, sb_SandBoxes.SEPFN == pfn
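With the completed signature, a sandbox is now identified by both its PFN and the storage element name. A hedged usage sketch; the helper name, PFN, and SE name below are made up:

    async def check_sandbox_assigned(db) -> bool:
        # Sketch only: db is assumed to be an open SandboxMetadataDB; the PFN and
        # SE name are hypothetical. scalar_one() raises if no matching row exists.
        pfn = "/S3/bucket/vo/user/abc123.tar.bz2"
        return await db.sandbox_is_assigned(pfn, "SandboxSE")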
@@ -84,13 +85,85 @@ class SandboxMetadataDB(BaseSQLDB):
         result = await self.conn.execute(stmt)
         is_assigned = result.scalar_one()
         return is_assigned
-
-
-
-        """
-
-
-
-
+
+    @staticmethod
+    def jobid_to_entity_id(job_id: int) -> str:
+        """Define the entity id as 'Entity:entity_id' due to the DB definition"""
+        return f"Job:{job_id}"
+
+    async def get_sandbox_assigned_to_job(
+        self, job_id: int, sb_type: SandboxType
+    ) -> list[Any]:
+        """Get the sandbox assign to job"""
+        entity_id = self.jobid_to_entity_id(job_id)
+        stmt = (
+            sqlalchemy.select(sb_SandBoxes.SEPFN)
+            .where(sb_SandBoxes.SBId == sb_EntityMapping.SBId)
+            .where(
+                sb_EntityMapping.EntityId == entity_id,
+                sb_EntityMapping.Type == sb_type,
+            )
         )
-        await self.conn.execute(stmt)
+        result = await self.conn.execute(stmt)
+        return [result.scalar()]
+
+    async def assign_sandbox_to_jobs(
+        self,
+        jobs_ids: list[int],
+        pfn: str,
+        sb_type: SandboxType,
+        se_name: str,
+    ) -> None:
+        """Mapp sandbox and jobs"""
+        for job_id in jobs_ids:
+            # Define the entity id as 'Entity:entity_id' due to the DB definition:
+            entity_id = self.jobid_to_entity_id(job_id)
+            select_sb_id = sqlalchemy.select(
+                sb_SandBoxes.SBId,
+                sqlalchemy.literal(entity_id).label("EntityId"),
+                sqlalchemy.literal(sb_type).label("Type"),
+            ).where(
+                sb_SandBoxes.SEName == se_name,
+                sb_SandBoxes.SEPFN == pfn,
+            )
+            stmt = sqlalchemy.insert(sb_EntityMapping).from_select(
+                ["SBId", "EntityId", "Type"], select_sb_id
+            )
+            await self.conn.execute(stmt)
+
+        stmt = (
+            sqlalchemy.update(sb_SandBoxes)
+            .where(sb_SandBoxes.SEPFN == pfn)
+            .values(Assigned=True)
+        )
+        result = await self.conn.execute(stmt)
+        assert result.rowcount == 1
+
+    async def unassign_sandboxes_to_jobs(self, jobs_ids: list[int]) -> None:
+        """Delete mapping between jobs and sandboxes"""
+        for job_id in jobs_ids:
+            entity_id = self.jobid_to_entity_id(job_id)
+            sb_sel_stmt = sqlalchemy.select(
+                sb_SandBoxes.SBId,
+            ).where(sb_EntityMapping.EntityId == entity_id)
+
+            result = await self.conn.execute(sb_sel_stmt)
+            sb_ids = [row.SBId for row in result]
+
+            del_stmt = sqlalchemy.delete(sb_EntityMapping).where(
+                sb_EntityMapping.EntityId == entity_id
+            )
+            await self.conn.execute(del_stmt)
+
+            sb_entity_sel_stmt = sqlalchemy.select(sb_EntityMapping.SBId).where(
+                sb_EntityMapping.SBId.in_(sb_ids)
+            )
+            result = await self.conn.execute(sb_entity_sel_stmt)
+            remaining_sb_ids = [row.SBId for row in result]
+            if not remaining_sb_ids:
+                unassign_stmt = (
+                    sqlalchemy.update(sb_SandBoxes)
+                    .where(sb_SandBoxes.SBId.in_(sb_ids))
+                    .values(Assigned=False)
+                )
+                await self.conn.execute(unassign_stmt)
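Taken together, the new methods form a small mapping API: jobs are stored in sb_EntityMapping as "Job:<id>" entities, assign_sandbox_to_jobs inserts one mapping row per job and marks the sandbox Assigned=True, and unassign_sandboxes_to_jobs deletes those rows and resets Assigned to False once a sandbox has no remaining mappings. A hedged end-to-end sketch; the helper name, job IDs, PFN, SE name, and the SandboxType member name are assumptions, not taken from this diff:

    from diracx.core.models import SandboxType

    async def sandbox_mapping_roundtrip(db) -> None:
        # Sketch only: db is assumed to be an open SandboxMetadataDB; the PFN,
        # SE name, and SandboxType member name are made up for illustration.
        job_ids = [1001, 1002]
        pfn = "/S3/bucket/vo/user/abc123.tar.bz2"
        sb_type = SandboxType.Input  # assumed member name

        # Entity ids are derived from job ids as "Job:<id>".
        assert db.jobid_to_entity_id(1001) == "Job:1001"

        # Map the sandbox to both jobs and mark it Assigned=True.
        await db.assign_sandbox_to_jobs(job_ids, pfn, sb_type=sb_type, se_name="SandboxSE")

        # Look up the PFN(s) recorded for one of the jobs.
        pfns = await db.get_sandbox_assigned_to_job(1001, sb_type)

        # Drop the mappings; the sandbox goes back to Assigned=False once no
        # entity mapping remains.
        await db.unassign_sandboxes_to_jobs(job_ids)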
{diracx_db-0.0.1a12.dist-info → diracx_db-0.0.1a13.dist-info}/RECORD

@@ -16,12 +16,12 @@ diracx/db/sql/dummy/schema.py,sha256=uEkGDNVZbmJecytkHY1CO-M1MiKxe5w1_h0joJMPC9E
 diracx/db/sql/jobs/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 diracx/db/sql/jobs/db.py,sha256=Y_2mx5kPTeuz6nxXVwGLzTssKsIH6nfnoTvWvilSgxA,29876
 diracx/db/sql/jobs/schema.py,sha256=YkxIdjTkvLlEZ9IQt86nj80eMvOPbcrfk9aisjmNpqY,9275
-diracx/db/sql/jobs/status_utility.py,sha256=
+diracx/db/sql/jobs/status_utility.py,sha256=_3Wdd11ShA4Z6HKr0_D_o8-zPZhdzgFpZSYAyYkH4Q0,10525
 diracx/db/sql/sandbox_metadata/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-diracx/db/sql/sandbox_metadata/db.py,sha256=
+diracx/db/sql/sandbox_metadata/db.py,sha256=0EDFMfOW_O3pEPTShqBCME9z4j-JKpyYM6-BBccr27E,6303
 diracx/db/sql/sandbox_metadata/schema.py,sha256=rngYYkJxBhjETBHGLD1CTipDGe44mRYR0wdaFoAJwp0,1400
-diracx_db-0.0.
-diracx_db-0.0.
-diracx_db-0.0.
-diracx_db-0.0.
-diracx_db-0.0.
+diracx_db-0.0.1a13.dist-info/METADATA,sha256=jmbXQvJykcvn3vGnxvO8GUGP3D3yjL-cXZwqXXJkzP4,681
+diracx_db-0.0.1a13.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
+diracx_db-0.0.1a13.dist-info/entry_points.txt,sha256=xEFGu_zgmPgQPlUeFtdahQfQIboJ1ugFOK8eMio9gtw,271
+diracx_db-0.0.1a13.dist-info/top_level.txt,sha256=vJx10tdRlBX3rF2Psgk5jlwVGZNcL3m_7iQWwgPXt-U,7
+diracx_db-0.0.1a13.dist-info/RECORD,,
{diracx_db-0.0.1a12.dist-info → diracx_db-0.0.1a13.dist-info}/WHEEL: File without changes
{diracx_db-0.0.1a12.dist-info → diracx_db-0.0.1a13.dist-info}/entry_points.txt: File without changes
{diracx_db-0.0.1a12.dist-info → diracx_db-0.0.1a13.dist-info}/top_level.txt: File without changes