diracx-db 0.0.1a15__tar.gz → 0.0.1a17__tar.gz

Files changed (43)
  1. {diracx_db-0.0.1a15 → diracx_db-0.0.1a17}/PKG-INFO +3 -3
  2. {diracx_db-0.0.1a15 → diracx_db-0.0.1a17}/pyproject.toml +2 -2
  3. {diracx_db-0.0.1a15 → diracx_db-0.0.1a17}/src/diracx/db/sql/jobs/db.py +32 -12
  4. {diracx_db-0.0.1a15 → diracx_db-0.0.1a17}/src/diracx/db/sql/jobs/status_utility.py +2 -2
  5. {diracx_db-0.0.1a15 → diracx_db-0.0.1a17}/src/diracx/db/sql/utils.py +4 -2
  6. {diracx_db-0.0.1a15 → diracx_db-0.0.1a17}/src/diracx_db.egg-info/PKG-INFO +3 -3
  7. {diracx_db-0.0.1a15 → diracx_db-0.0.1a17}/src/diracx_db.egg-info/requires.txt +2 -2
  8. diracx_db-0.0.1a17/tests/jobs/test_jobDB.py +314 -0
  9. {diracx_db-0.0.1a15 → diracx_db-0.0.1a17}/tests/test_sandbox_metadata.py +5 -2
  10. diracx_db-0.0.1a15/tests/jobs/test_jobDB.py +0 -50
  11. {diracx_db-0.0.1a15 → diracx_db-0.0.1a17}/README.md +0 -0
  12. {diracx_db-0.0.1a15 → diracx_db-0.0.1a17}/setup.cfg +0 -0
  13. {diracx_db-0.0.1a15 → diracx_db-0.0.1a17}/src/diracx/db/__init__.py +0 -0
  14. {diracx_db-0.0.1a15 → diracx_db-0.0.1a17}/src/diracx/db/__main__.py +0 -0
  15. {diracx_db-0.0.1a15 → diracx_db-0.0.1a17}/src/diracx/db/exceptions.py +0 -0
  16. {diracx_db-0.0.1a15 → diracx_db-0.0.1a17}/src/diracx/db/os/__init__.py +0 -0
  17. {diracx_db-0.0.1a15 → diracx_db-0.0.1a17}/src/diracx/db/os/job_parameters.py +0 -0
  18. {diracx_db-0.0.1a15 → diracx_db-0.0.1a17}/src/diracx/db/os/utils.py +0 -0
  19. {diracx_db-0.0.1a15 → diracx_db-0.0.1a17}/src/diracx/db/py.typed +0 -0
  20. {diracx_db-0.0.1a15 → diracx_db-0.0.1a17}/src/diracx/db/sql/__init__.py +0 -0
  21. {diracx_db-0.0.1a15 → diracx_db-0.0.1a17}/src/diracx/db/sql/auth/__init__.py +0 -0
  22. {diracx_db-0.0.1a15 → diracx_db-0.0.1a17}/src/diracx/db/sql/auth/db.py +0 -0
  23. {diracx_db-0.0.1a15 → diracx_db-0.0.1a17}/src/diracx/db/sql/auth/schema.py +0 -0
  24. {diracx_db-0.0.1a15 → diracx_db-0.0.1a17}/src/diracx/db/sql/dummy/__init__.py +0 -0
  25. {diracx_db-0.0.1a15 → diracx_db-0.0.1a17}/src/diracx/db/sql/dummy/db.py +0 -0
  26. {diracx_db-0.0.1a15 → diracx_db-0.0.1a17}/src/diracx/db/sql/dummy/schema.py +0 -0
  27. {diracx_db-0.0.1a15 → diracx_db-0.0.1a17}/src/diracx/db/sql/jobs/__init__.py +0 -0
  28. {diracx_db-0.0.1a15 → diracx_db-0.0.1a17}/src/diracx/db/sql/jobs/schema.py +0 -0
  29. {diracx_db-0.0.1a15 → diracx_db-0.0.1a17}/src/diracx/db/sql/sandbox_metadata/__init__.py +0 -0
  30. {diracx_db-0.0.1a15 → diracx_db-0.0.1a17}/src/diracx/db/sql/sandbox_metadata/db.py +0 -0
  31. {diracx_db-0.0.1a15 → diracx_db-0.0.1a17}/src/diracx/db/sql/sandbox_metadata/schema.py +0 -0
  32. {diracx_db-0.0.1a15 → diracx_db-0.0.1a17}/src/diracx_db.egg-info/SOURCES.txt +0 -0
  33. {diracx_db-0.0.1a15 → diracx_db-0.0.1a17}/src/diracx_db.egg-info/dependency_links.txt +0 -0
  34. {diracx_db-0.0.1a15 → diracx_db-0.0.1a17}/src/diracx_db.egg-info/entry_points.txt +0 -0
  35. {diracx_db-0.0.1a15 → diracx_db-0.0.1a17}/src/diracx_db.egg-info/top_level.txt +0 -0
  36. {diracx_db-0.0.1a15 → diracx_db-0.0.1a17}/tests/auth/test_authorization_flow.py +0 -0
  37. {diracx_db-0.0.1a15 → diracx_db-0.0.1a17}/tests/auth/test_device_flow.py +0 -0
  38. {diracx_db-0.0.1a15 → diracx_db-0.0.1a17}/tests/auth/test_refresh_token.py +0 -0
  39. {diracx_db-0.0.1a15 → diracx_db-0.0.1a17}/tests/jobs/test_jobLoggingDB.py +0 -0
  40. {diracx_db-0.0.1a15 → diracx_db-0.0.1a17}/tests/opensearch/test_connection.py +0 -0
  41. {diracx_db-0.0.1a15 → diracx_db-0.0.1a17}/tests/opensearch/test_index_template.py +0 -0
  42. {diracx_db-0.0.1a15 → diracx_db-0.0.1a17}/tests/opensearch/test_search.py +0 -0
  43. {diracx_db-0.0.1a15 → diracx_db-0.0.1a17}/tests/test_dummyDB.py +0 -0
{diracx_db-0.0.1a15 → diracx_db-0.0.1a17}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: diracx-db
-Version: 0.0.1a15
+Version: 0.0.1a17
 Summary: TODO
 License: GPL-3.0-only
 Classifier: Intended Audience :: Science/Research
@@ -14,7 +14,7 @@ Requires-Dist: dirac
 Requires-Dist: diracx-core
 Requires-Dist: fastapi
 Requires-Dist: opensearch-py[async]
-Requires-Dist: pydantic
-Requires-Dist: sqlalchemy[aiomysql,aiosqlite]
+Requires-Dist: pydantic>=2.4
+Requires-Dist: sqlalchemy[aiomysql,aiosqlite]>=2
 Provides-Extra: testing
 Requires-Dist: diracx-testing; extra == "testing"
{diracx_db-0.0.1a15 → diracx_db-0.0.1a17}/pyproject.toml

@@ -17,8 +17,8 @@ dependencies = [
     "diracx-core",
     "fastapi",
     "opensearch-py[async]",
-    "pydantic",
-    "sqlalchemy[aiomysql,aiosqlite]",
+    "pydantic >=2.4",
+    "sqlalchemy[aiomysql,aiosqlite] >= 2",
 ]
 dynamic = ["version"]
 
{diracx_db-0.0.1a15 → diracx_db-0.0.1a17}/src/diracx/db/sql/jobs/db.py

@@ -19,6 +19,9 @@ from diracx.core.models import (
     LimitedJobStatusReturn,
     ScalarSearchOperator,
     ScalarSearchSpec,
+    SearchSpec,
+    SortDirection,
+    SortSpec,
 )
 from diracx.core.properties import JOB_SHARING, SecurityProperty
 
@@ -83,14 +86,14 @@ class JobDB(BaseSQLDB):
 
     async def search(
         self,
-        parameters,
-        search,
-        sorts,
+        parameters: list[str] | None,
+        search: list[SearchSpec],
+        sorts: list[SortSpec],
         *,
         distinct: bool = False,
         per_page: int = 100,
         page: int | None = None,
-    ) -> list[dict[str, Any]]:
+    ) -> tuple[int, list[dict[Any, Any]]]:
         # Find which columns to select
         columns = _get_columns(Jobs.__table__, parameters)
         stmt = select(*columns)
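
search() now returns a (total, rows) pair instead of a bare list, and its arguments are typed with the new SearchSpec and SortSpec models, so every call site has to unpack two values (see the `_, result = await self.search(...)` changes further down). A minimal sketch of a hypothetical caller, assuming an already-connected JobDB instance and that plain dicts satisfy the spec types, as the status_utility.py call below already does:

from diracx.core.models import ScalarSearchOperator, SortDirection

# Hypothetical caller sketch: unpack (total, rows) from the new search() API.
async def list_new_jobs(job_db):
    total, rows = await job_db.search(
        parameters=["JobID", "Status"],
        search=[
            {"parameter": "Status", "operator": ScalarSearchOperator.EQUAL, "value": "New"}
        ],
        sorts=[{"parameter": "JobID", "direction": SortDirection.ASC}],
        per_page=10,
        page=1,
    )
    # total counts every match before pagination; rows holds at most one page of dicts
    return total, rows
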
@@ -98,28 +101,45 @@ class JobDB(BaseSQLDB):
         stmt = apply_search_filters(Jobs.__table__, stmt, search)
 
         # Apply any sort constraints
+        sort_columns = []
         for sort in sorts:
             if sort["parameter"] not in Jobs.__table__.columns:
                 raise InvalidQueryError(
                     f"Cannot sort by {sort['parameter']}: unknown column"
                 )
             column = Jobs.__table__.columns[sort["parameter"]]
-            if sort["direction"] == "asc":
-                column = column.asc()
-            elif sort["direction"] == "desc":
-                column = column.desc()
+            sorted_column = None
+            if sort["direction"] == SortDirection.ASC:
+                sorted_column = column.asc()
+            elif sort["direction"] == SortDirection.DESC:
+                sorted_column = column.desc()
             else:
                 raise InvalidQueryError(f"Unknown sort {sort['direction']=}")
+            sort_columns.append(sorted_column)
+
+        if sort_columns:
+            stmt = stmt.order_by(*sort_columns)
 
         if distinct:
             stmt = stmt.distinct()
 
+        # Calculate total count before applying pagination
+        total_count_subquery = stmt.alias()
+        total_count_stmt = select(func.count()).select_from(total_count_subquery)
+        total = (await self.conn.execute(total_count_stmt)).scalar_one()
+
         # Apply pagination
-        if page:
-            raise NotImplementedError("TODO Not yet implemented")
+        if page is not None:
+            if page < 1:
+                raise InvalidQueryError("Page must be a positive integer")
+            if per_page < 1:
+                raise InvalidQueryError("Per page must be a positive integer")
+            stmt = stmt.offset((page - 1) * per_page).limit(per_page)
 
         # Execute the query
-        return [dict(row._mapping) async for row in (await self.conn.stream(stmt))]
+        return total, [
+            dict(row._mapping) async for row in (await self.conn.stream(stmt))
+        ]
 
     async def _insertNewJDL(self, jdl) -> int:
         from DIRAC.WorkloadManagementSystem.DB.JobDBUtils import compressJDL
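
The total is computed by wrapping the filtered (and sorted) statement in a subquery and counting its rows before OFFSET/LIMIT are applied; page numbers are 1-based, so page p starts at offset (p - 1) * per_page. A standalone SQLAlchemy sketch of the same pattern, using a hypothetical jobs table rather than the diracx schema:

# Standalone sketch of the count-then-paginate pattern (hypothetical `jobs` table, not diracx code).
from sqlalchemy import Column, Integer, MetaData, String, Table, func, select

metadata = MetaData()
jobs = Table(
    "Jobs",
    metadata,
    Column("JobID", Integer, primary_key=True),
    Column("Status", String(32)),
)

def count_and_paginate(stmt, page: int, per_page: int):
    # Count all rows matching the filters, before OFFSET/LIMIT are applied
    count_stmt = select(func.count()).select_from(stmt.alias())
    # 1-based pages: page 1 -> offset 0, page 2 -> offset per_page, ...
    paged_stmt = stmt.offset((page - 1) * per_page).limit(per_page)
    return count_stmt, paged_stmt

stmt = select(jobs.c.JobID).where(jobs.c.Status == "New").order_by(jobs.c.JobID.asc())
count_stmt, paged_stmt = count_and_paginate(stmt, page=2, per_page=10)  # rows 11-20
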
@@ -314,7 +334,7 @@ class JobDB(BaseSQLDB):
         from DIRAC.Core.Utilities.ClassAd.ClassAdLight import ClassAd
         from DIRAC.Core.Utilities.ReturnValues import SErrorException
 
-        result = await self.search(
+        _, result = await self.search(
             parameters=[
                 "Status",
                 "MinorStatus",
{diracx_db-0.0.1a15 → diracx_db-0.0.1a17}/src/diracx/db/sql/jobs/status_utility.py

@@ -41,9 +41,9 @@ async def set_job_status(
     # transform JobStateUpdate objects into dicts
     statusDict = {}
     for key, value in status.items():
-        statusDict[key] = {k: v for k, v in value.dict().items() if v is not None}
+        statusDict[key] = {k: v for k, v in value.model_dump().items() if v is not None}
 
-    res = await job_db.search(
+    _, res = await job_db.search(
         parameters=["Status", "StartExecTime", "EndExecTime"],
         search=[
             {
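
The .dict() call is the pydantic v1 API; with the dependency bumped to pydantic>=2.4 the v2 spelling is .model_dump(). A minimal illustration with a throwaway stand-in model (not the real diracx JobStateUpdate):

# Throwaway pydantic v2 stand-in (not the real diracx JobStateUpdate):
# v1's .dict() becomes .model_dump() in v2.
from pydantic import BaseModel

class StateUpdate(BaseModel):
    Status: str | None = None
    MinorStatus: str | None = None

update = StateUpdate(Status="Running")
status_dict = {k: v for k, v in update.model_dump().items() if v is not None}
assert status_dict == {"Status": "Running"}
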
{diracx_db-0.0.1a15 → diracx_db-0.0.1a17}/src/diracx/db/sql/utils.py

@@ -11,7 +11,7 @@ from datetime import datetime, timedelta, timezone
 from functools import partial
 from typing import TYPE_CHECKING, AsyncIterator, Self, cast
 
-from pydantic import parse_obj_as
+from pydantic import TypeAdapter
 from sqlalchemy import Column as RawColumn
 from sqlalchemy import DateTime, Enum, MetaData, select
 from sqlalchemy.exc import OperationalError
@@ -123,7 +123,9 @@ class BaseSQLDB(metaclass=ABCMeta):
                 if db_url == "sqlite+aiosqlite:///:memory:":
                     db_urls[db_name] = db_url
                 else:
-                    db_urls[db_name] = parse_obj_as(SqlalchemyDsn, db_url)
+                    db_urls[db_name] = str(
+                        TypeAdapter(SqlalchemyDsn).validate_python(db_url)
+                    )
             except Exception:
                 logger.error("Error loading URL for %s", db_name)
                 raise
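
parse_obj_as was the pydantic v1 helper for validating a bare value against a type; in v2 the equivalent is TypeAdapter(T).validate_python(value), and the result is wrapped in str() here because v2 URL/DSN types are no longer str subclasses. A minimal sketch, using AnyUrl as a generic stand-in for diracx's SqlalchemyDsn alias:

# Minimal pydantic v2 sketch: parse_obj_as(T, value) -> TypeAdapter(T).validate_python(value).
# AnyUrl is a generic stand-in for diracx's SqlalchemyDsn alias.
from pydantic import AnyUrl, TypeAdapter

validated = TypeAdapter(AnyUrl).validate_python("mysql+aiomysql://user:secret@db-host:3306/JobDB")
# v2 URL types are no longer str subclasses, hence the explicit str() in the change above
db_url = str(validated)
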
{diracx_db-0.0.1a15 → diracx_db-0.0.1a17}/src/diracx_db.egg-info/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: diracx-db
-Version: 0.0.1a15
+Version: 0.0.1a17
 Summary: TODO
 License: GPL-3.0-only
 Classifier: Intended Audience :: Science/Research
@@ -14,7 +14,7 @@ Requires-Dist: dirac
 Requires-Dist: diracx-core
 Requires-Dist: fastapi
 Requires-Dist: opensearch-py[async]
-Requires-Dist: pydantic
-Requires-Dist: sqlalchemy[aiomysql,aiosqlite]
+Requires-Dist: pydantic>=2.4
+Requires-Dist: sqlalchemy[aiomysql,aiosqlite]>=2
 Provides-Extra: testing
 Requires-Dist: diracx-testing; extra == "testing"
{diracx_db-0.0.1a15 → diracx_db-0.0.1a17}/src/diracx_db.egg-info/requires.txt

@@ -2,8 +2,8 @@ dirac
 diracx-core
 fastapi
 opensearch-py[async]
-pydantic
-sqlalchemy[aiomysql,aiosqlite]
+pydantic>=2.4
+sqlalchemy[aiomysql,aiosqlite]>=2
 
 [testing]
 diracx-testing
diracx_db-0.0.1a17/tests/jobs/test_jobDB.py (new file)

@@ -0,0 +1,314 @@
+from __future__ import annotations
+
+import asyncio
+
+import pytest
+
+from diracx.core.exceptions import InvalidQueryError, JobNotFound
+from diracx.core.models import (
+    ScalarSearchOperator,
+    ScalarSearchSpec,
+    SortDirection,
+    SortSpec,
+    VectorSearchOperator,
+    VectorSearchSpec,
+)
+from diracx.db.sql.jobs.db import JobDB
+
+
+@pytest.fixture
+async def job_db(tmp_path):
+    job_db = JobDB("sqlite+aiosqlite:///:memory:")
+    async with job_db.engine_context():
+        async with job_db.engine.begin() as conn:
+            # set PRAGMA foreign_keys=ON if sqlite
+            if job_db._db_url.startswith("sqlite"):
+                await conn.exec_driver_sql("PRAGMA foreign_keys=ON")
+            await conn.run_sync(job_db.metadata.create_all)
+        yield job_db
+
+
+async def test_search_parameters(job_db):
+    """Test that we can search specific parameters for jobs in the database."""
+    async with job_db as job_db:
+        total, result = await job_db.search(["JobID"], [], [])
+        assert total == 0
+        assert not result
+
+        result = await asyncio.gather(
+            *(
+                job_db.insert(
+                    f"JDL{i}",
+                    "owner",
+                    "owner_group",
+                    "New",
+                    "dfdfds",
+                    "lhcb",
+                )
+                for i in range(100)
+            )
+        )
+
+    async with job_db as job_db:
+        # Search a specific parameter: JobID
+        total, result = await job_db.search(["JobID"], [], [])
+        assert total == 100
+        assert result
+        for r in result:
+            assert r.keys() == {"JobID"}
+
+        # Search a specific parameter: Status
+        total, result = await job_db.search(["Status"], [], [])
+        assert total == 100
+        assert result
+        for r in result:
+            assert r.keys() == {"Status"}
+
+        # Search for multiple parameters: JobID, Status
+        total, result = await job_db.search(["JobID", "Status"], [], [])
+        assert total == 100
+        assert result
+        for r in result:
+            assert r.keys() == {"JobID", "Status"}
+
+        # Search for a specific parameter but use distinct: Status
+        total, result = await job_db.search(["Status"], [], [], distinct=True)
+        assert total == 1
+        assert result
+
+        # Search for a non-existent parameter: Dummy
+        with pytest.raises(InvalidQueryError):
+            total, result = await job_db.search(["Dummy"], [], [])
+
+
+async def test_search_conditions(job_db):
+    """Test that we can search for specific jobs in the database."""
+    async with job_db as job_db:
+        result = await asyncio.gather(
+            *(
+                job_db.insert(
+                    f"JDL{i}",
+                    f"owner{i}",
+                    "owner_group",
+                    "New",
+                    "dfdfds",
+                    "lhcb",
+                )
+                for i in range(100)
+            )
+        )
+
+    async with job_db as job_db:
+        # Search a specific scalar condition: JobID eq 3
+        condition = ScalarSearchSpec(
+            parameter="JobID", operator=ScalarSearchOperator.EQUAL, value=3
+        )
+        total, result = await job_db.search([], [condition], [])
+        assert total == 1
+        assert result
+        assert len(result) == 1
+        assert result[0]["JobID"] == 3
+
+        # Search a specific scalar condition: JobID lt 3
+        condition = ScalarSearchSpec(
+            parameter="JobID", operator=ScalarSearchOperator.LESS_THAN, value=3
+        )
+        total, result = await job_db.search([], [condition], [])
+        assert total == 2
+        assert result
+        assert len(result) == 2
+        assert result[0]["JobID"] == 1
+        assert result[1]["JobID"] == 2
+
+        # Search a specific scalar condition: JobID neq 3
+        condition = ScalarSearchSpec(
+            parameter="JobID", operator=ScalarSearchOperator.NOT_EQUAL, value=3
+        )
+        total, result = await job_db.search([], [condition], [])
+        assert total == 99
+        assert result
+        assert len(result) == 99
+        assert all(r["JobID"] != 3 for r in result)
+
+        # Search a specific scalar condition: JobID eq 5873 (does not exist)
+        condition = ScalarSearchSpec(
+            parameter="JobID", operator=ScalarSearchOperator.EQUAL, value=5873
+        )
+        total, result = await job_db.search([], [condition], [])
+        assert not result
+
+        # Search a specific vector condition: JobID in 1,2,3
+        condition = VectorSearchSpec(
+            parameter="JobID", operator=VectorSearchOperator.IN, values=[1, 2, 3]
+        )
+        total, result = await job_db.search([], [condition], [])
+        assert total == 3
+        assert result
+        assert len(result) == 3
+        assert all(r["JobID"] in [1, 2, 3] for r in result)
+
+        # Search a specific vector condition: JobID in 1,2,5873 (one of them does not exist)
+        condition = VectorSearchSpec(
+            parameter="JobID", operator=VectorSearchOperator.IN, values=[1, 2, 5873]
+        )
+        total, result = await job_db.search([], [condition], [])
+        assert total == 2
+        assert result
+        assert len(result) == 2
+        assert all(r["JobID"] in [1, 2] for r in result)
+
+        # Search for multiple conditions based on different parameters: JobID eq 70, JobID in 4,5,6
+        condition1 = ScalarSearchSpec(
+            parameter="Owner", operator=ScalarSearchOperator.EQUAL, value="owner4"
+        )
+        condition2 = VectorSearchSpec(
+            parameter="JobID", operator=VectorSearchOperator.IN, values=[4, 5, 6]
+        )
+        total, result = await job_db.search([], [condition1, condition2], [])
+        assert total == 1
+        assert result
+        assert len(result) == 1
+        assert result[0]["JobID"] == 5
+        assert result[0]["Owner"] == "owner4"
+
+        # Search for multiple conditions based on the same parameter: JobID eq 70, JobID in 4,5,6
+        condition1 = ScalarSearchSpec(
+            parameter="JobID", operator=ScalarSearchOperator.EQUAL, value=70
+        )
+        condition2 = VectorSearchSpec(
+            parameter="JobID", operator=VectorSearchOperator.IN, values=[4, 5, 6]
+        )
+        total, result = await job_db.search([], [condition1, condition2], [])
+        assert total == 0
+        assert not result
+
+
+async def test_search_sorts(job_db):
+    """Test that we can search for jobs in the database and sort the results."""
+    async with job_db as job_db:
+        result = await asyncio.gather(
+            *(
+                job_db.insert(
+                    f"JDL{i}",
+                    f"owner{i}",
+                    "owner_group1" if i < 50 else "owner_group2",
+                    "New",
+                    "dfdfds",
+                    "lhcb",
+                )
+                for i in range(100)
+            )
+        )
+
+    async with job_db as job_db:
+        # Search and sort by JobID in ascending order
+        sort = SortSpec(parameter="JobID", direction=SortDirection.ASC)
+        total, result = await job_db.search([], [], [sort])
+        assert total == 100
+        assert result
+        for i, r in enumerate(result):
+            assert r["JobID"] == i + 1
+
+        # Search and sort by JobID in descending order
+        sort = SortSpec(parameter="JobID", direction=SortDirection.DESC)
+        total, result = await job_db.search([], [], [sort])
+        assert total == 100
+        assert result
+        for i, r in enumerate(result):
+            assert r["JobID"] == 100 - i
+
+        # Search and sort by Owner in ascending order
+        sort = SortSpec(parameter="Owner", direction=SortDirection.ASC)
+        total, result = await job_db.search([], [], [sort])
+        assert total == 100
+        assert result
+        # Assert that owner10 is before owner2 because of the lexicographical order
+        assert result[2]["Owner"] == "owner10"
+        assert result[12]["Owner"] == "owner2"
+
+        # Search and sort by Owner in descending order
+        sort = SortSpec(parameter="Owner", direction=SortDirection.DESC)
+        total, result = await job_db.search([], [], [sort])
+        assert total == 100
+        assert result
+        # Assert that owner10 is before owner2 because of the lexicographical order
+        assert result[97]["Owner"] == "owner10"
+        assert result[87]["Owner"] == "owner2"
+
+        # Search and sort by OwnerGroup in ascending order and JobID in descending order
+        sort1 = SortSpec(parameter="OwnerGroup", direction=SortDirection.ASC)
+        sort2 = SortSpec(parameter="JobID", direction=SortDirection.DESC)
+        total, result = await job_db.search([], [], [sort1, sort2])
+        assert total == 100
+        assert result
+        assert result[0]["OwnerGroup"] == "owner_group1"
+        assert result[0]["JobID"] == 50
+        assert result[99]["OwnerGroup"] == "owner_group2"
+        assert result[99]["JobID"] == 51
+
+
+async def test_search_pagination(job_db):
+    """Test that we can search for jobs in the database."""
+    async with job_db as job_db:
+        result = await asyncio.gather(
+            *(
+                job_db.insert(
+                    f"JDL{i}",
+                    f"owner{i}",
+                    "owner_group1" if i < 50 else "owner_group2",
+                    "New",
+                    "dfdfds",
+                    "lhcb",
+                )
+                for i in range(100)
+            )
+        )
+
+    async with job_db as job_db:
+        # Search for the first 10 jobs
+        total, result = await job_db.search([], [], [], per_page=10, page=1)
+        assert total == 100
+        assert result
+        assert len(result) == 10
+        assert result[0]["JobID"] == 1
+
+        # Search for the second 10 jobs
+        total, result = await job_db.search([], [], [], per_page=10, page=2)
+        assert total == 100
+        assert result
+        assert len(result) == 10
+        assert result[0]["JobID"] == 11
+
+        # Search for the last 10 jobs
+        total, result = await job_db.search([], [], [], per_page=10, page=10)
+        assert total == 100
+        assert result
+        assert len(result) == 10
+        assert result[0]["JobID"] == 91
+
+        # Search for the second 50 jobs
+        total, result = await job_db.search([], [], [], per_page=50, page=2)
+        assert total == 100
+        assert result
+        assert len(result) == 50
+        assert result[0]["JobID"] == 51
+
+        # Invalid page number
+        total, result = await job_db.search([], [], [], per_page=10, page=11)
+        assert total == 100
+        assert not result
+
+        # Invalid page number
+        with pytest.raises(InvalidQueryError):
+            result = await job_db.search([], [], [], per_page=10, page=0)
+
+        # Invalid per_page number
+        with pytest.raises(InvalidQueryError):
+            result = await job_db.search([], [], [], per_page=0, page=1)
+
+
+async def test_set_job_command_invalid_job_id(job_db: JobDB):
+    """Test that setting a command for a non-existent job raises JobNotFound."""
+    async with job_db as job_db:
+        with pytest.raises(JobNotFound):
+            await job_db.set_job_command(123456, "test_command")
{diracx_db-0.0.1a15 → diracx_db-0.0.1a17}/tests/test_sandbox_metadata.py

@@ -26,13 +26,16 @@ def test_get_pfn(sandbox_metadata_db: SandboxMetadataDB):
         sub="vo:sub", preferred_username="user1", dirac_group="group1", vo="vo"
     )
     sandbox_info = SandboxInfo(
-        checksum="checksum",
+        checksum="90e0ba6763c91a905bb9fd6e025aac1952ae742e6d756a31a0963aa7df7cd7b1",
        checksum_algorithm="sha256",
        format="tar.bz2",
        size=100,
    )
    pfn = sandbox_metadata_db.get_pfn("bucket1", user_info, sandbox_info)
-    assert pfn == "/S3/bucket1/vo/group1/user1/sha256:checksum.tar.bz2"
+    assert pfn == (
+        "/S3/bucket1/vo/group1/user1/"
+        "sha256:90e0ba6763c91a905bb9fd6e025aac1952ae742e6d756a31a0963aa7df7cd7b1.tar.bz2"
+    )
 
 
 async def test_insert_sandbox(sandbox_metadata_db: SandboxMetadataDB):
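
The placeholder checksum string is replaced by a realistic 64-character sha256 hex digest, matching the sha256:<hexdigest> segment of the PFN. A quick hashlib sketch of how such a digest and path could be produced; the build_pfn helper below is illustrative only, not the diracx get_pfn implementation:

# Illustrative only: build_pfn is a made-up helper, not diracx's get_pfn.
import hashlib

def build_pfn(bucket: str, vo: str, group: str, user: str, payload: bytes, fmt: str = "tar.bz2") -> str:
    checksum = hashlib.sha256(payload).hexdigest()  # 64 hex characters, like the value in the test
    return f"/S3/{bucket}/{vo}/{group}/{user}/sha256:{checksum}.{fmt}"

print(build_pfn("bucket1", "vo", "group1", "user1", b"sandbox archive bytes"))
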
diracx_db-0.0.1a15/tests/jobs/test_jobDB.py (deleted)

@@ -1,50 +0,0 @@
-from __future__ import annotations
-
-import asyncio
-
-import pytest
-
-from diracx.core.exceptions import JobNotFound
-from diracx.db.sql.jobs.db import JobDB
-
-
-@pytest.fixture
-async def job_db(tmp_path):
-    job_db = JobDB("sqlite+aiosqlite:///:memory:")
-    async with job_db.engine_context():
-        async with job_db.engine.begin() as conn:
-            # set PRAGMA foreign_keys=ON if sqlite
-            if job_db._db_url.startswith("sqlite"):
-                await conn.exec_driver_sql("PRAGMA foreign_keys=ON")
-            await conn.run_sync(job_db.metadata.create_all)
-        yield job_db
-
-
-async def test_some_asyncio_code(job_db):
-    async with job_db as job_db:
-        result = await job_db.search(["JobID"], [], [])
-        assert not result
-
-        result = await asyncio.gather(
-            *(
-                job_db.insert(
-                    f"JDL{i}",
-                    "owner",
-                    "owner_group",
-                    "New",
-                    "dfdfds",
-                    "lhcb",
-                )
-                for i in range(100)
-            )
-        )
-
-    async with job_db as job_db:
-        result = await job_db.search(["JobID"], [], [])
-        assert result
-
-
-async def test_set_job_command_invalid_job_id(job_db: JobDB):
-    async with job_db as job_db:
-        with pytest.raises(JobNotFound):
-            await job_db.set_job_command(123456, "test_command")