diracx-db 0.0.1a17__py3-none-any.whl → 0.0.1a19__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.
@@ -1,5 +1,5 @@
  Wheel-Version: 1.0
- Generator: bdist_wheel (0.43.0)
+ Generator: setuptools (75.1.0)
  Root-Is-Purelib: true
  Tag: py3-none-any

@@ -5,5 +5,6 @@ JobParametersDB = diracx.db.os:JobParametersDB
  AuthDB = diracx.db.sql:AuthDB
  JobDB = diracx.db.sql:JobDB
  JobLoggingDB = diracx.db.sql:JobLoggingDB
+ PilotAgentsDB = diracx.db.sql:PilotAgentsDB
  SandboxMetadataDB = diracx.db.sql:SandboxMetadataDB
  TaskQueueDB = diracx.db.sql:TaskQueueDB
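The new PilotAgentsDB entry point joins the existing databases in the diracx.db.sql group. For orientation only, the sketch below (not part of the package) shows how these entry points can be enumerated and loaded with the standard library, assuming Python 3.10+; it is similar in spirit to the select_from_extension lookup used by BaseSQLDB.available_implementations in the deleted utils.py further down.

# Illustrative sketch only: list and load the diracx.db.sql entry points above.
# Assumes Python 3.10+, where entry_points() accepts group/name filters.
from importlib.metadata import entry_points

for ep in entry_points(group="diracx.db.sql"):
    # e.g. "PilotAgentsDB" -> "diracx.db.sql:PilotAgentsDB"
    print(ep.name, "->", ep.value)

(pilot_ep,) = entry_points(group="diracx.db.sql", name="PilotAgentsDB")
PilotAgentsDB = pilot_ep.load()  # the class exported by diracx.db.sql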
diracx/db/sql/jobs/schema.py DELETED
@@ -1,290 +0,0 @@
- import sqlalchemy.types as types
- from sqlalchemy import (
-     BigInteger,
-     Boolean,
-     DateTime,
-     Enum,
-     Float,
-     ForeignKey,
-     Index,
-     Integer,
-     Numeric,
-     PrimaryKeyConstraint,
-     String,
-     Text,
- )
- from sqlalchemy.orm import declarative_base
-
- from ..utils import Column, DateNowColumn, NullColumn
-
- JobDBBase = declarative_base()
- JobLoggingDBBase = declarative_base()
- TaskQueueDBBase = declarative_base()
-
-
- class EnumBackedBool(types.TypeDecorator):
-     """Maps a ``EnumBackedBool()`` column to True/False in Python"""
-
-     impl = types.Enum
-     cache_ok: bool = True
-
-     def __init__(self) -> None:
-         super().__init__("True", "False")
-
-     def process_bind_param(self, value, dialect) -> str:
-         if value is True:
-             return "True"
-         elif value is False:
-             return "False"
-         else:
-             raise NotImplementedError(value, dialect)
-
-     def process_result_value(self, value, dialect) -> bool:
-         if value == "True":
-             return True
-         elif value == "False":
-             return False
-         else:
-             raise NotImplementedError(f"Unknown {value=}")
-
-
- class Jobs(JobDBBase):
-     __tablename__ = "Jobs"
-
-     JobID = Column(
-         "JobID",
-         Integer,
-         ForeignKey("JobJDLs.JobID", ondelete="CASCADE"),
-         primary_key=True,
-         default=0,
-     )
-     JobType = Column("JobType", String(32), default="user")
-     JobGroup = Column("JobGroup", String(32), default="00000000")
-     Site = Column("Site", String(100), default="ANY")
-     JobName = Column("JobName", String(128), default="Unknown")
-     Owner = Column("Owner", String(64), default="Unknown")
-     OwnerGroup = Column("OwnerGroup", String(128), default="Unknown")
-     VO = Column("VO", String(32))
-     SubmissionTime = NullColumn("SubmissionTime", DateTime)
-     RescheduleTime = NullColumn("RescheduleTime", DateTime)
-     LastUpdateTime = NullColumn("LastUpdateTime", DateTime)
-     StartExecTime = NullColumn("StartExecTime", DateTime)
-     HeartBeatTime = NullColumn("HeartBeatTime", DateTime)
-     EndExecTime = NullColumn("EndExecTime", DateTime)
-     Status = Column("Status", String(32), default="Received")
-     MinorStatus = Column("MinorStatus", String(128), default="Unknown")
-     ApplicationStatus = Column("ApplicationStatus", String(255), default="Unknown")
-     UserPriority = Column("UserPriority", Integer, default=0)
-     RescheduleCounter = Column("RescheduleCounter", Integer, default=0)
-     VerifiedFlag = Column("VerifiedFlag", EnumBackedBool(), default=False)
-     # TODO: Should this be True/False/"Failed"? Or True/False/Null?
-     AccountedFlag = Column(
-         "AccountedFlag", Enum("True", "False", "Failed"), default="False"
-     )
-
-     __table_args__ = (
-         Index("JobType", "JobType"),
-         Index("JobGroup", "JobGroup"),
-         Index("Site", "Site"),
-         Index("Owner", "Owner"),
-         Index("OwnerGroup", "OwnerGroup"),
-         Index("Status", "Status"),
-         Index("MinorStatus", "MinorStatus"),
-         Index("ApplicationStatus", "ApplicationStatus"),
-         Index("StatusSite", "Status", "Site"),
-         Index("LastUpdateTime", "LastUpdateTime"),
-     )
-
-
- class JobJDLs(JobDBBase):
-     __tablename__ = "JobJDLs"
-     JobID = Column(Integer, autoincrement=True, primary_key=True)
-     JDL = Column(Text)
-     JobRequirements = Column(Text)
-     OriginalJDL = Column(Text)
-
-
- class InputData(JobDBBase):
-     __tablename__ = "InputData"
-     JobID = Column(
-         Integer, ForeignKey("Jobs.JobID", ondelete="CASCADE"), primary_key=True
-     )
-     LFN = Column(String(255), default="", primary_key=True)
-     Status = Column(String(32), default="AprioriGood")
-
-
- class JobParameters(JobDBBase):
-     __tablename__ = "JobParameters"
-     JobID = Column(
-         Integer, ForeignKey("Jobs.JobID", ondelete="CASCADE"), primary_key=True
-     )
-     Name = Column(String(100), primary_key=True)
-     Value = Column(Text)
-
-
- class OptimizerParameters(JobDBBase):
-     __tablename__ = "OptimizerParameters"
-     JobID = Column(
-         Integer, ForeignKey("Jobs.JobID", ondelete="CASCADE"), primary_key=True
-     )
-     Name = Column(String(100), primary_key=True)
-     Value = Column(Text)
-
-
- class AtticJobParameters(JobDBBase):
-     __tablename__ = "AtticJobParameters"
-     JobID = Column(
-         Integer, ForeignKey("Jobs.JobID", ondelete="CASCADE"), primary_key=True
-     )
-     Name = Column(String(100), primary_key=True)
-     Value = Column(Text)
-     RescheduleCycle = Column(Integer)
-
-
- class SiteMask(JobDBBase):
-     __tablename__ = "SiteMask"
-     Site = Column(String(64), primary_key=True)
-     Status = Column(String(64))
-     LastUpdateTime = Column(DateTime)
-     Author = Column(String(255))
-     Comment = Column(Text)
-
-
- class SiteMaskLogging(JobDBBase):
-     __tablename__ = "SiteMaskLogging"
-     Site = Column(String(64), primary_key=True)
-     UpdateTime = Column(DateTime, primary_key=True)
-     Status = Column(String(64))
-     Author = Column(String(255))
-     Comment = Column(Text)
-
-
- class HeartBeatLoggingInfo(JobDBBase):
-     __tablename__ = "HeartBeatLoggingInfo"
-     JobID = Column(
-         Integer, ForeignKey("Jobs.JobID", ondelete="CASCADE"), primary_key=True
-     )
-     Name = Column(String(100), primary_key=True)
-     Value = Column(Text)
-     HeartBeatTime = Column(DateTime, primary_key=True)
-
-
- class JobCommands(JobDBBase):
-     __tablename__ = "JobCommands"
-     JobID = Column(
-         Integer, ForeignKey("Jobs.JobID", ondelete="CASCADE"), primary_key=True
-     )
-     Command = Column(String(100))
-     Arguments = Column(String(100))
-     Status = Column(String(64), default="Received")
-     ReceptionTime = Column(DateTime, primary_key=True)
-     ExecutionTime = NullColumn(DateTime)
-
-
- class LoggingInfo(JobLoggingDBBase):
-     __tablename__ = "LoggingInfo"
-     JobID = Column(Integer)
-     SeqNum = Column(Integer)
-     Status = Column(String(32), default="")
-     MinorStatus = Column(String(128), default="")
-     ApplicationStatus = Column(String(255), default="")
-     StatusTime = DateNowColumn()
-     # TODO: Check that this corresponds to the DOUBLE(12,3) type in MySQL
-     StatusTimeOrder = Column(Numeric(precision=12, scale=3), default=0)
-     Source = Column(String(32), default="Unknown", name="StatusSource")
-     __table_args__ = (PrimaryKeyConstraint("JobID", "SeqNum"),)
-
-
- class TaskQueues(TaskQueueDBBase):
-     __tablename__ = "tq_TaskQueues"
-     TQId = Column(Integer, primary_key=True)
-     Owner = Column(String(255), nullable=False)
-     OwnerGroup = Column(String(32), nullable=False)
-     VO = Column(String(32), nullable=False)
-     CPUTime = Column(BigInteger, nullable=False)
-     Priority = Column(Float, nullable=False)
-     Enabled = Column(Boolean, nullable=False, default=0)
-     __table_args__ = (Index("TQOwner", "Owner", "OwnerGroup", "CPUTime"),)
-
-
- class JobsQueue(TaskQueueDBBase):
-     __tablename__ = "tq_Jobs"
-     TQId = Column(
-         Integer, ForeignKey("tq_TaskQueues.TQId", ondelete="CASCADE"), primary_key=True
-     )
-     JobId = Column(Integer, primary_key=True)
-     Priority = Column(Integer, nullable=False)
-     RealPriority = Column(Float, nullable=False)
-     __table_args__ = (Index("TaskIndex", "TQId"),)
-
-
- class SitesQueue(TaskQueueDBBase):
-     __tablename__ = "tq_TQToSites"
-     TQId = Column(
-         Integer, ForeignKey("tq_TaskQueues.TQId", ondelete="CASCADE"), primary_key=True
-     )
-     Value = Column(String(64), primary_key=True)
-     __table_args__ = (
-         Index("SitesTaskIndex", "TQId"),
-         Index("SitesIndex", "Value"),
-     )
-
-
- class GridCEsQueue(TaskQueueDBBase):
-     __tablename__ = "tq_TQToGridCEs"
-     TQId = Column(
-         Integer, ForeignKey("tq_TaskQueues.TQId", ondelete="CASCADE"), primary_key=True
-     )
-     Value = Column(String(64), primary_key=True)
-     __table_args__ = (
-         Index("GridCEsTaskIndex", "TQId"),
-         Index("GridCEsValueIndex", "Value"),
-     )
-
-
- class BannedSitesQueue(TaskQueueDBBase):
-     __tablename__ = "tq_TQToBannedSites"
-     TQId = Column(
-         Integer, ForeignKey("tq_TaskQueues.TQId", ondelete="CASCADE"), primary_key=True
-     )
-     Value = Column(String(64), primary_key=True)
-     __table_args__ = (
-         Index("BannedSitesTaskIndex", "TQId"),
-         Index("BannedSitesValueIndex", "Value"),
-     )
-
-
- class PlatformsQueue(TaskQueueDBBase):
-     __tablename__ = "tq_TQToPlatforms"
-     TQId = Column(
-         Integer, ForeignKey("tq_TaskQueues.TQId", ondelete="CASCADE"), primary_key=True
-     )
-     Value = Column(String(64), primary_key=True)
-     __table_args__ = (
-         Index("PlatformsTaskIndex", "TQId"),
-         Index("PlatformsValueIndex", "Value"),
-     )
-
-
- class JobTypesQueue(TaskQueueDBBase):
-     __tablename__ = "tq_TQToJobTypes"
-     TQId = Column(
-         Integer, ForeignKey("tq_TaskQueues.TQId", ondelete="CASCADE"), primary_key=True
-     )
-     Value = Column(String(64), primary_key=True)
-     __table_args__ = (
-         Index("JobTypesTaskIndex", "TQId"),
-         Index("JobTypesValueIndex", "Value"),
-     )
-
-
- class TagsQueue(TaskQueueDBBase):
-     __tablename__ = "tq_TQToTags"
-     TQId = Column(
-         Integer, ForeignKey("tq_TaskQueues.TQId", ondelete="CASCADE"), primary_key=True
-     )
-     Value = Column(String(64), primary_key=True)
-     __table_args__ = (
-         Index("TagsTaskIndex", "TQId"),
-         Index("TagsValueIndex", "Value"),
-     )
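The hunk above removes the combined jobs schema module, which grouped the Jobs, JobLogging and TaskQueue table definitions behind three declarative bases (the RECORD below lists it as diracx/db/sql/jobs/schema.py). As a hedged sketch, with diracx-db 0.0.1a17 still installed those bases could be materialised for an ad-hoc test database like this; the SQLite URL is illustrative:

# Sketch against diracx-db==0.0.1a17 only; this module is gone in 0.0.1a19.
from sqlalchemy import create_engine

from diracx.db.sql.jobs.schema import JobDBBase, TaskQueueDBBase

engine = create_engine("sqlite:///:memory:")  # illustrative test database
JobDBBase.metadata.create_all(engine)         # Jobs, JobJDLs, InputData, ...
TaskQueueDBBase.metadata.create_all(engine)   # tq_TaskQueues, tq_Jobs, tq_TQTo*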
diracx/db/sql/utils.py DELETED
@@ -1,236 +0,0 @@
- from __future__ import annotations
-
- __all__ = ("utcnow", "Column", "NullColumn", "DateNowColumn", "BaseSQLDB")
-
- import contextlib
- import logging
- import os
- from abc import ABCMeta
- from contextvars import ContextVar
- from datetime import datetime, timedelta, timezone
- from functools import partial
- from typing import TYPE_CHECKING, AsyncIterator, Self, cast
-
- from pydantic import TypeAdapter
- from sqlalchemy import Column as RawColumn
- from sqlalchemy import DateTime, Enum, MetaData, select
- from sqlalchemy.exc import OperationalError
- from sqlalchemy.ext.asyncio import AsyncConnection, AsyncEngine, create_async_engine
- from sqlalchemy.ext.compiler import compiles
- from sqlalchemy.sql import expression
-
- from diracx.core.exceptions import InvalidQueryError
- from diracx.core.extensions import select_from_extension
- from diracx.core.settings import SqlalchemyDsn
- from diracx.db.exceptions import DBUnavailable
-
- if TYPE_CHECKING:
-     from sqlalchemy.types import TypeEngine
-
- logger = logging.getLogger(__name__)
-
-
- class utcnow(expression.FunctionElement):
-     type: TypeEngine = DateTime()
-     inherit_cache: bool = True
-
-
- @compiles(utcnow, "postgresql")
- def pg_utcnow(element, compiler, **kw) -> str:
-     return "TIMEZONE('utc', CURRENT_TIMESTAMP)"
-
-
- @compiles(utcnow, "mssql")
- def ms_utcnow(element, compiler, **kw) -> str:
-     return "GETUTCDATE()"
-
-
- @compiles(utcnow, "mysql")
- def mysql_utcnow(element, compiler, **kw) -> str:
-     return "(UTC_TIMESTAMP)"
-
-
- @compiles(utcnow, "sqlite")
- def sqlite_utcnow(element, compiler, **kw) -> str:
-     return "DATETIME('now')"
-
-
- def substract_date(**kwargs: float) -> datetime:
-     return datetime.now(tz=timezone.utc) - timedelta(**kwargs)
-
-
- Column: partial[RawColumn] = partial(RawColumn, nullable=False)
- NullColumn: partial[RawColumn] = partial(RawColumn, nullable=True)
- DateNowColumn = partial(Column, DateTime(timezone=True), server_default=utcnow())
-
-
- def EnumColumn(enum_type, **kwargs):
-     return Column(Enum(enum_type, native_enum=False, length=16), **kwargs)
-
-
- class SQLDBError(Exception):
-     pass
-
-
- class SQLDBUnavailable(DBUnavailable, SQLDBError):
-     """Used whenever we encounter a problem with the B connection"""
-
-
- class BaseSQLDB(metaclass=ABCMeta):
-     """This should be the base class of all the DiracX DBs"""
-
-     # engine: AsyncEngine
-     # TODO: Make metadata an abstract property
-     metadata: MetaData
-
-     def __init__(self, db_url: str) -> None:
-         # We use a ContextVar to make sure that self._conn
-         # is specific to each context, and avoid parallel
-         # route executions to overlap
-         self._conn: ContextVar[AsyncConnection | None] = ContextVar(
-             "_conn", default=None
-         )
-         self._db_url = db_url
-         self._engine: AsyncEngine | None = None
-
-     @classmethod
-     def available_implementations(cls, db_name: str) -> list[type[BaseSQLDB]]:
-         """Return the available implementations of the DB in reverse priority order."""
-         db_classes: list[type[BaseSQLDB]] = [
-             entry_point.load()
-             for entry_point in select_from_extension(
-                 group="diracx.db.sql", name=db_name
-             )
-         ]
-         if not db_classes:
-             raise NotImplementedError(f"Could not find any matches for {db_name=}")
-         return db_classes
-
-     @classmethod
-     def available_urls(cls) -> dict[str, str]:
-         """Return a dict of available database urls.
-
-         The list of available URLs is determined by environment variables
-         prefixed with ``DIRACX_DB_URL_{DB_NAME}``.
-         """
-         db_urls: dict[str, str] = {}
-         for entry_point in select_from_extension(group="diracx.db.sql"):
-             db_name = entry_point.name
-             var_name = f"DIRACX_DB_URL_{entry_point.name.upper()}"
-             if var_name in os.environ:
-                 try:
-                     db_url = os.environ[var_name]
-                     if db_url == "sqlite+aiosqlite:///:memory:":
-                         db_urls[db_name] = db_url
-                     else:
-                         db_urls[db_name] = str(
-                             TypeAdapter(SqlalchemyDsn).validate_python(db_url)
-                         )
-                 except Exception:
-                     logger.error("Error loading URL for %s", db_name)
-                     raise
-         return db_urls
-
-     @classmethod
-     def transaction(cls) -> Self:
-         raise NotImplementedError("This should never be called")
-
-     @property
-     def engine(self) -> AsyncEngine:
-         """The engine to use for database operations.
-
-         It is normally not necessary to use the engine directly,
-         unless you are doing something special, like writing a
-         test fixture that gives you a db.
-
-
-         Requires that the engine_context has been entered.
-
-         """
-         assert self._engine is not None, "engine_context must be entered"
-         return self._engine
-
-     @contextlib.asynccontextmanager
-     async def engine_context(self) -> AsyncIterator[None]:
-         """Context manage to manage the engine lifecycle.
-         This is called once at the application startup
-         (see ``lifetime_functions``)
-         """
-         assert self._engine is None, "engine_context cannot be nested"
-
-         # Set the pool_recycle to 30mn
-         # That should prevent the problem of MySQL expiring connection
-         # after 60mn by default
-         engine = create_async_engine(self._db_url, pool_recycle=60 * 30)
-         self._engine = engine
-
-         yield
-
-         self._engine = None
-         await engine.dispose()
-
-     @property
-     def conn(self) -> AsyncConnection:
-         if self._conn.get() is None:
-             raise RuntimeError(f"{self.__class__} was used before entering")
-         return cast(AsyncConnection, self._conn.get())
-
-     async def __aenter__(self) -> Self:
-         """
-         Create a connection.
-         This is called by the Dependency mechanism (see ``db_transaction``),
-         It will create a new connection/transaction for each route call.
-         """
-         assert self._conn.get() is None, "BaseSQLDB context cannot be nested"
-         try:
-             self._conn.set(await self.engine.connect().__aenter__())
-         except Exception as e:
-             raise SQLDBUnavailable("Cannot connect to DB") from e
-
-         return self
-
-     async def __aexit__(self, exc_type, exc, tb):
-         """
-         This is called when exciting a route.
-         If there was no exception, the changes in the DB are committed.
-         Otherwise, they are rollbacked.
-         """
-         if exc_type is None:
-             await self._conn.get().commit()
-         await self._conn.get().__aexit__(exc_type, exc, tb)
-         self._conn.set(None)
-
-     async def ping(self):
-         """
-         Check whether the connection to the DB is still working.
-         We could enable the ``pre_ping`` in the engine, but this would
-         be ran at every query.
-         """
-         try:
-             await self.conn.scalar(select(1))
-         except OperationalError as e:
-             raise SQLDBUnavailable("Cannot ping the DB") from e
-
-
- def apply_search_filters(table, stmt, search):
-     # Apply any filters
-     for query in search:
-         column = table.columns[query["parameter"]]
-         if query["operator"] == "eq":
-             expr = column == query["value"]
-         elif query["operator"] == "neq":
-             expr = column != query["value"]
-         elif query["operator"] == "gt":
-             expr = column > query["value"]
-         elif query["operator"] == "lt":
-             expr = column < query["value"]
-         elif query["operator"] == "in":
-             expr = column.in_(query["values"])
-         elif query["operator"] in "like":
-             expr = column.like(query["value"])
-         elif query["operator"] in "ilike":
-             expr = column.ilike(query["value"])
-         else:
-             raise InvalidQueryError(f"Unknown filter {query=}")
-         stmt = stmt.where(expr)
-     return stmt
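The removed apply_search_filters helper translated dictionaries with parameter/operator/value keys into SQLAlchemy WHERE clauses (eq, neq, gt, lt, in, like, ilike). A hedged usage sketch, with an invented table and invented filter values, assuming diracx-db 0.0.1a17 is still installed:

# Illustrative only: the table and the search payload are made up for the example.
from sqlalchemy import Column, Integer, MetaData, String, Table, select

from diracx.db.sql.utils import apply_search_filters  # module path as of 0.0.1a17

metadata = MetaData()
jobs = Table(
    "Jobs",
    metadata,
    Column("JobID", Integer, primary_key=True),
    Column("Status", String(32)),
    Column("Site", String(100)),
)

search = [
    {"parameter": "Status", "operator": "eq", "value": "Running"},
    {"parameter": "Site", "operator": "in", "values": ["Site.One.org", "Site.Two.org"]},
]

stmt = apply_search_filters(jobs, select(jobs.c.JobID), search)
print(stmt)  # SELECT ... WHERE both conditions are combined with AND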
@@ -1,27 +0,0 @@
- diracx/db/__init__.py,sha256=2oeUeVwZq53bo_ZOflEYZsBn7tcR5Tzb2AIu0TAWELM,109
- diracx/db/__main__.py,sha256=3yaUP1ig-yaPSQM4wy6CtSXXHivQg-hIz2FeBt7joBc,1714
- diracx/db/exceptions.py,sha256=-LSkEwsvjwU7vXqx-xeLvLKInTRAhjwB7K_AKfQcIH8,41
- diracx/db/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- diracx/db/os/__init__.py,sha256=IZr6z6SefrRvuC8sTC4RmB3_wwOyEt1GzpDuwSMH8O4,112
- diracx/db/os/job_parameters.py,sha256=Knca19uT2G-5FI7MOFlaOAXeHn4ecPVLIH30TiwhaTw,858
- diracx/db/os/utils.py,sha256=mau0_2uRi-I3geefmKQRWFKo4JcIkIUADvnwBiQX700,9129
- diracx/db/sql/__init__.py,sha256=R6tk5lo1EHbt8joGDesesYHcc1swIq9T4AaSixhh7lA,252
- diracx/db/sql/utils.py,sha256=F55qv4u5JQMV5Cl-CNq4nXRCODlCZWarM8Dnkrx55T8,7930
- diracx/db/sql/auth/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- diracx/db/sql/auth/db.py,sha256=mKjy5B8orw0yu6nOwxyzbBqyeE-J9iYq6fKjuELmr9g,10273
- diracx/db/sql/auth/schema.py,sha256=JCkSa2IRzqMHTpaSc9aB9h33XsFyEM_Ohsenex6xagY,2835
- diracx/db/sql/dummy/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- diracx/db/sql/dummy/db.py,sha256=5PIPv6aKY7CGIwmvnGKowjVr9ZQWpbjFSd2PIX7YOUw,1627
- diracx/db/sql/dummy/schema.py,sha256=uEkGDNVZbmJecytkHY1CO-M1MiKxe5w1_h0joJMPC9E,680
- diracx/db/sql/jobs/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- diracx/db/sql/jobs/db.py,sha256=CyQIPX2g5ancBIBEMLijAyTi5HZxTgeVH0qQZ3p3KcU,30722
- diracx/db/sql/jobs/schema.py,sha256=YkxIdjTkvLlEZ9IQt86nj80eMvOPbcrfk9aisjmNpqY,9275
- diracx/db/sql/jobs/status_utility.py,sha256=w3caA9ng4y-Bzp0DOasj_DQ_M6JxAzJoHVmyJbEZ2qE,10534
- diracx/db/sql/sandbox_metadata/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- diracx/db/sql/sandbox_metadata/db.py,sha256=0EDFMfOW_O3pEPTShqBCME9z4j-JKpyYM6-BBccr27E,6303
- diracx/db/sql/sandbox_metadata/schema.py,sha256=rngYYkJxBhjETBHGLD1CTipDGe44mRYR0wdaFoAJwp0,1400
- diracx_db-0.0.1a17.dist-info/METADATA,sha256=NarGcFXcBgKbSaHiyBPBVRj7dJ4xQ7OpJ8k0oIA7cHI,691
- diracx_db-0.0.1a17.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
- diracx_db-0.0.1a17.dist-info/entry_points.txt,sha256=xEFGu_zgmPgQPlUeFtdahQfQIboJ1ugFOK8eMio9gtw,271
- diracx_db-0.0.1a17.dist-info/top_level.txt,sha256=vJx10tdRlBX3rF2Psgk5jlwVGZNcL3m_7iQWwgPXt-U,7
- diracx_db-0.0.1a17.dist-info/RECORD,,
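For reference, each RECORD row has the form "path,sha256=<urlsafe-base64 digest without padding>,<size in bytes>". A small sketch for recomputing one entry against an installed copy of the a17 wheel; resolving the path relative to site-packages is left to the reader:

# Sketch: recompute the RECORD-style digest and size for one installed file.
import base64
import hashlib
from pathlib import Path

path = Path("diracx/db/sql/utils.py")  # resolve against your site-packages directory
digest = hashlib.sha256(path.read_bytes()).digest()
b64 = base64.urlsafe_b64encode(digest).rstrip(b"=").decode()
print(f"{path},sha256={b64},{path.stat().st_size}")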