lsst-felis 29.2025.2400__py3-none-any.whl → 29.2025.2600__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of lsst-felis has been flagged as potentially problematic. Consult the package registry's advisory page for this release for more details.
- felis/cli.py +27 -5
- felis/config/tap_schema/columns.csv +33 -0
- felis/config/tap_schema/key_columns.csv +8 -0
- felis/config/tap_schema/keys.csv +8 -0
- felis/config/tap_schema/schemas.csv +2 -0
- felis/config/tap_schema/tables.csv +6 -0
- felis/datamodel.py +237 -23
- felis/metadata.py +5 -0
- felis/tap_schema.py +49 -2
- {lsst_felis-29.2025.2400.dist-info → lsst_felis-29.2025.2600.dist-info}/METADATA +1 -1
- lsst_felis-29.2025.2600.dist-info/RECORD +31 -0
- lsst_felis-29.2025.2400.dist-info/RECORD +0 -26
- /felis/{schemas → config/tap_schema}/tap_schema_std.yaml +0 -0
- {lsst_felis-29.2025.2400.dist-info → lsst_felis-29.2025.2600.dist-info}/WHEEL +0 -0
- {lsst_felis-29.2025.2400.dist-info → lsst_felis-29.2025.2600.dist-info}/entry_points.txt +0 -0
- {lsst_felis-29.2025.2400.dist-info → lsst_felis-29.2025.2600.dist-info}/licenses/COPYRIGHT +0 -0
- {lsst_felis-29.2025.2400.dist-info → lsst_felis-29.2025.2600.dist-info}/licenses/LICENSE +0 -0
- {lsst_felis-29.2025.2400.dist-info → lsst_felis-29.2025.2600.dist-info}/top_level.txt +0 -0
- {lsst_felis-29.2025.2400.dist-info → lsst_felis-29.2025.2600.dist-info}/zip-safe +0 -0
felis/cli.py
CHANGED
|
@@ -38,7 +38,7 @@ from .db.schema import create_database
|
|
|
38
38
|
from .db.utils import DatabaseContext, is_mock_url
|
|
39
39
|
from .diff import DatabaseDiff, FormattedSchemaDiff, SchemaDiff
|
|
40
40
|
from .metadata import MetaDataBuilder
|
|
41
|
-
from .tap_schema import DataLoader, TableManager
|
|
41
|
+
from .tap_schema import DataLoader, MetadataInserter, TableManager
|
|
42
42
|
|
|
43
43
|
__all__ = ["cli"]
|
|
44
44
|
|
|
@@ -284,14 +284,20 @@ def load_tap_schema(
|
|
|
284
284
|
|
|
285
285
|
|
|
286
286
|
@cli.command("init-tap-schema", help="Initialize a standard TAP_SCHEMA database")
|
|
287
|
-
@click.option("--engine-url", envvar="FELIS_ENGINE_URL", help="SQLAlchemy Engine URL")
|
|
287
|
+
@click.option("--engine-url", envvar="FELIS_ENGINE_URL", help="SQLAlchemy Engine URL", required=True)
|
|
288
288
|
@click.option("--tap-schema-name", help="Name of the TAP_SCHEMA schema in the database")
|
|
289
289
|
@click.option(
|
|
290
290
|
"--tap-tables-postfix", help="Postfix which is applied to standard TAP_SCHEMA table names", default=""
|
|
291
291
|
)
|
|
292
|
+
@click.option(
|
|
293
|
+
"--insert-metadata/--no-insert-metadata",
|
|
294
|
+
is_flag=True,
|
|
295
|
+
help="Insert metadata describing TAP_SCHEMA itself",
|
|
296
|
+
default=True,
|
|
297
|
+
)
|
|
292
298
|
@click.pass_context
|
|
293
299
|
def init_tap_schema(
|
|
294
|
-
ctx: click.Context, engine_url: str, tap_schema_name: str, tap_tables_postfix: str
|
|
300
|
+
ctx: click.Context, engine_url: str, tap_schema_name: str, tap_tables_postfix: str, insert_metadata: bool
|
|
295
301
|
) -> None:
|
|
296
302
|
"""Initialize a standard TAP_SCHEMA database.
|
|
297
303
|
|
|
@@ -303,6 +309,10 @@ def init_tap_schema(
|
|
|
303
309
|
Name of the TAP_SCHEMA schema in the database.
|
|
304
310
|
tap_tables_postfix
|
|
305
311
|
Postfix which is applied to standard TAP_SCHEMA table names.
|
|
312
|
+
insert_metadata
|
|
313
|
+
Insert metadata describing TAP_SCHEMA itself.
|
|
314
|
+
If set to False, only the TAP_SCHEMA tables will be created, but no
|
|
315
|
+
metadata will be inserted.
|
|
306
316
|
"""
|
|
307
317
|
url = make_url(engine_url)
|
|
308
318
|
engine: Engine | MockConnection
|
|
@@ -315,6 +325,9 @@ def init_tap_schema(
|
|
|
315
325
|
table_name_postfix=tap_tables_postfix,
|
|
316
326
|
)
|
|
317
327
|
mgr.initialize_database(engine)
|
|
328
|
+
if insert_metadata:
|
|
329
|
+
inserter = MetadataInserter(mgr, engine)
|
|
330
|
+
inserter.insert_metadata()
|
|
318
331
|
|
|
319
332
|
|
|
320
333
|
@cli.command("validate", help="Validate one or more Felis YAML files")
|
|
@@ -465,12 +478,21 @@ def diff(
|
|
|
465
478
|
felis dump schema.yaml schema_dump.yaml
|
|
466
479
|
""",
|
|
467
480
|
)
|
|
481
|
+
@click.option(
|
|
482
|
+
"--strip-ids/--no-strip-ids",
|
|
483
|
+
is_flag=True,
|
|
484
|
+
help="Strip IDs from the output schema",
|
|
485
|
+
default=False,
|
|
486
|
+
)
|
|
468
487
|
@click.argument("files", nargs=2, type=click.Path())
|
|
469
488
|
@click.pass_context
|
|
470
489
|
def dump(
|
|
471
490
|
ctx: click.Context,
|
|
491
|
+
strip_ids: bool,
|
|
472
492
|
files: list[str],
|
|
473
493
|
) -> None:
|
|
494
|
+
if strip_ids:
|
|
495
|
+
logger.info("Stripping IDs from the output schema")
|
|
474
496
|
if files[1].endswith(".json"):
|
|
475
497
|
format = "json"
|
|
476
498
|
elif files[1].endswith(".yaml"):
|
|
@@ -480,9 +502,9 @@ def dump(
|
|
|
480
502
|
schema = Schema.from_uri(files[0], context={"id_generation": ctx.obj["id_generation"]})
|
|
481
503
|
with open(files[1], "w") as f:
|
|
482
504
|
if format == "yaml":
|
|
483
|
-
schema.dump_yaml(f)
|
|
505
|
+
schema.dump_yaml(f, strip_ids=strip_ids)
|
|
484
506
|
elif format == "json":
|
|
485
|
-
schema.dump_json(f)
|
|
507
|
+
schema.dump_json(f, strip_ids=strip_ids)
|
|
486
508
|
|
|
487
509
|
|
|
488
510
|
if __name__ == "__main__":
|
|
@@ -0,0 +1,33 @@
|
|
|
1
|
+
table_name,column_name,utype,ucd,unit,description,datatype,arraysize,xtype,size,principal,indexed,std,column_index
|
|
2
|
+
tap_schema.columns,"""size""",\N,\N,\N,deprecated: use arraysize,int,\N,\N,\N,1,0,1,9
|
|
3
|
+
tap_schema.columns,arraysize,\N,\N,\N,lists the size of variable-length columns in the tableset,char,16*,\N,16,1,0,1,8
|
|
4
|
+
tap_schema.columns,column_index,\N,\N,\N,recommended sort order when listing columns of a table,int,\N,\N,\N,1,0,1,13
|
|
5
|
+
tap_schema.columns,column_name,\N,\N,\N,the column name,char,64*,\N,64,1,0,1,2
|
|
6
|
+
tap_schema.columns,datatype,\N,\N,\N,lists the ADQL datatype of columns in the tableset,char,64*,\N,64,1,0,1,7
|
|
7
|
+
tap_schema.columns,description,\N,\N,\N,describes the columns in the tableset,char,512*,\N,512,1,0,1,6
|
|
8
|
+
tap_schema.columns,indexed,\N,\N,\N,"an indexed column; 1 means 1, 0 means 0",int,\N,\N,\N,1,0,1,11
|
|
9
|
+
tap_schema.columns,principal,\N,\N,\N,"a principal column; 1 means 1, 0 means 0",int,\N,\N,\N,1,0,1,10
|
|
10
|
+
tap_schema.columns,std,\N,\N,\N,"a standard column; 1 means 1, 0 means 0",int,\N,\N,\N,1,0,1,12
|
|
11
|
+
tap_schema.columns,table_name,\N,\N,\N,the table this column belongs to,char,64*,\N,64,1,0,1,1
|
|
12
|
+
tap_schema.columns,ucd,\N,\N,\N,lists the UCDs of columns in the tableset,char,64*,\N,64,1,0,1,4
|
|
13
|
+
tap_schema.columns,unit,\N,\N,\N,lists the unit used for column values in the tableset,char,64*,\N,64,1,0,1,5
|
|
14
|
+
tap_schema.columns,utype,\N,\N,\N,lists the utypes of columns in the tableset,char,512*,\N,512,1,0,1,3
|
|
15
|
+
tap_schema.columns,xtype,\N,\N,\N,a DALI or custom extended type annotation,char,64*,\N,64,1,0,1,7
|
|
16
|
+
tap_schema.key_columns,from_column,\N,\N,\N,column in the from_table,char,64*,\N,64,1,0,1,2
|
|
17
|
+
tap_schema.key_columns,key_id,\N,\N,\N,key to join to tap_schema.keys,char,64*,\N,64,1,0,1,1
|
|
18
|
+
tap_schema.key_columns,target_column,\N,\N,\N,column in the target_table,char,64*,\N,64,1,0,1,3
|
|
19
|
+
tap_schema.keys,description,\N,\N,\N,describes keys in the tableset,char,512*,\N,512,1,0,1,5
|
|
20
|
+
tap_schema.keys,from_table,\N,\N,\N,the table with the foreign key,char,64*,\N,64,1,0,1,2
|
|
21
|
+
tap_schema.keys,key_id,\N,\N,\N,unique key to join to tap_schema.key_columns,char,64*,\N,64,1,0,1,1
|
|
22
|
+
tap_schema.keys,target_table,\N,\N,\N,the table with the primary key,char,64*,\N,64,1,0,1,3
|
|
23
|
+
tap_schema.keys,utype,\N,\N,\N,lists the utype of keys in the tableset,char,512*,\N,512,1,0,1,4
|
|
24
|
+
tap_schema.schemas,description,\N,\N,\N,describes schemas in the tableset,char,512*,\N,512,1,0,1,3
|
|
25
|
+
tap_schema.schemas,schema_index,\N,\N,\N,recommended sort order when listing schemas,int,\N,\N,\N,1,0,1,4
|
|
26
|
+
tap_schema.schemas,schema_name,\N,\N,\N,schema name for reference to tap_schema.schemas,char,64*,\N,64,1,0,1,1
|
|
27
|
+
tap_schema.schemas,utype,\N,\N,\N,lists the utypes of schemas in the tableset,char,512*,\N,512,1,0,1,2
|
|
28
|
+
tap_schema.tables,description,\N,\N,\N,describes tables in the tableset,char,512*,\N,512,1,0,1,5
|
|
29
|
+
tap_schema.tables,schema_name,\N,\N,\N,the schema this table belongs to,char,512*,\N,512,1,0,1,1
|
|
30
|
+
tap_schema.tables,table_index,\N,\N,\N,recommended sort order when listing tables,int,\N,\N,\N,1,0,1,6
|
|
31
|
+
tap_schema.tables,table_name,\N,\N,\N,the fully qualified table name,char,64*,\N,64,1,0,1,2
|
|
32
|
+
tap_schema.tables,table_type,\N,\N,\N,one of: table view,char,8*,\N,8,1,0,1,3
|
|
33
|
+
tap_schema.tables,utype,\N,\N,\N,lists the utype of tables in the tableset,char,512*,\N,512,1,0,1,4
|
|
@@ -0,0 +1,8 @@
|
|
|
1
|
+
key_id,from_table,target_table,utype,description
|
|
2
|
+
k1,tap_schema.tables,tap_schema.schemas,\N,\N
|
|
3
|
+
k2,tap_schema.columns,tap_schema.tables,\N,\N
|
|
4
|
+
k3,tap_schema.keys,tap_schema.tables,\N,\N
|
|
5
|
+
k4,tap_schema.keys,tap_schema.tables,\N,\N
|
|
6
|
+
k5,tap_schema.key_columns,tap_schema.keys,\N,\N
|
|
7
|
+
k6,tap_schema.key_columns,tap_schema.columns,\N,\N
|
|
8
|
+
k7,tap_schema.key_columns,tap_schema.columns,\N,\N
|
|
@@ -0,0 +1,6 @@
|
|
|
1
|
+
schema_name,table_name,table_type,utype,description,table_index
|
|
2
|
+
tap_schema,tap_schema.columns,table,\N,description of columns in this tableset,102000
|
|
3
|
+
tap_schema,tap_schema.key_columns,table,\N,description of foreign key columns in this tableset,104000
|
|
4
|
+
tap_schema,tap_schema.keys,table,\N,description of foreign keys in this tableset,103000
|
|
5
|
+
tap_schema,tap_schema.schemas,table,\N,description of schemas in this tableset,100000
|
|
6
|
+
tap_schema,tap_schema.tables,table,\N,description of tables in this tableset,101000
|
felis/datamodel.py
CHANGED
|
@@ -39,11 +39,13 @@ from pydantic import (
|
|
|
39
39
|
ConfigDict,
|
|
40
40
|
Field,
|
|
41
41
|
PrivateAttr,
|
|
42
|
+
ValidationError,
|
|
42
43
|
ValidationInfo,
|
|
43
44
|
field_serializer,
|
|
44
45
|
field_validator,
|
|
45
46
|
model_validator,
|
|
46
47
|
)
|
|
48
|
+
from pydantic_core import InitErrorDetails
|
|
47
49
|
|
|
48
50
|
from .db.dialects import get_supported_dialects
|
|
49
51
|
from .db.sqltypes import get_type_func
|
|
@@ -633,6 +635,12 @@ class ForeignKeyConstraint(Constraint):
|
|
|
633
635
|
referenced_columns: list[str] = Field(alias="referencedColumns")
|
|
634
636
|
"""The columns referenced by the foreign key."""
|
|
635
637
|
|
|
638
|
+
on_delete: Literal["CASCADE", "SET NULL", "SET DEFAULT", "RESTRICT", "NO ACTION"] | None = None
|
|
639
|
+
"""Action to take when the referenced row is deleted."""
|
|
640
|
+
|
|
641
|
+
on_update: Literal["CASCADE", "SET NULL", "SET DEFAULT", "RESTRICT", "NO ACTION"] | None = None
|
|
642
|
+
"""Action to take when the referenced row is updated."""
|
|
643
|
+
|
|
636
644
|
@field_serializer("type")
|
|
637
645
|
def serialize_type(self, value: str) -> str:
|
|
638
646
|
"""Ensure '@type' is included in serialized output.
|
|
@@ -752,7 +760,10 @@ class ColumnGroup(BaseObject):
|
|
|
752
760
|
for col in self.columns:
|
|
753
761
|
if isinstance(col, str):
|
|
754
762
|
# Dereference ColumnRef to Column object
|
|
755
|
-
|
|
763
|
+
try:
|
|
764
|
+
col_obj = self.table._find_column_by_id(col)
|
|
765
|
+
except KeyError as e:
|
|
766
|
+
raise ValueError(f"Column '{col}' not found in table '{self.table.name}'") from e
|
|
756
767
|
dereferenced_columns.append(col_obj)
|
|
757
768
|
else:
|
|
758
769
|
dereferenced_columns.append(col)
|
|
@@ -902,7 +913,7 @@ class Table(BaseObject):
|
|
|
902
913
|
for column in self.columns:
|
|
903
914
|
if column.id == id:
|
|
904
915
|
return column
|
|
905
|
-
raise
|
|
916
|
+
raise KeyError(f"Column '{id}' not found in table '{self.name}'")
|
|
906
917
|
|
|
907
918
|
@model_validator(mode="after")
|
|
908
919
|
def dereference_column_groups(self: Table) -> Table:
|
|
@@ -1022,6 +1033,56 @@ class SchemaIdVisitor:
|
|
|
1022
1033
|
T = TypeVar("T", bound=BaseObject)
|
|
1023
1034
|
|
|
1024
1035
|
|
|
1036
|
+
def _strip_ids(data: Any) -> Any:
|
|
1037
|
+
"""Recursively strip '@id' fields from a dictionary or list.
|
|
1038
|
+
|
|
1039
|
+
Parameters
|
|
1040
|
+
----------
|
|
1041
|
+
data
|
|
1042
|
+
The data to strip IDs from, which can be a dictionary, list, or any
|
|
1043
|
+
other type. Other types will be returned unchanged.
|
|
1044
|
+
"""
|
|
1045
|
+
if isinstance(data, dict):
|
|
1046
|
+
data.pop("@id", None)
|
|
1047
|
+
for k, v in data.items():
|
|
1048
|
+
data[k] = _strip_ids(v)
|
|
1049
|
+
return data
|
|
1050
|
+
elif isinstance(data, list):
|
|
1051
|
+
return [_strip_ids(item) for item in data]
|
|
1052
|
+
else:
|
|
1053
|
+
return data
|
|
1054
|
+
|
|
1055
|
+
|
|
1056
|
+
def _append_error(
|
|
1057
|
+
errors: list[InitErrorDetails],
|
|
1058
|
+
loc: tuple,
|
|
1059
|
+
input_value: Any,
|
|
1060
|
+
error_message: str,
|
|
1061
|
+
error_type: str = "value_error",
|
|
1062
|
+
) -> None:
|
|
1063
|
+
"""Append an error to the errors list.
|
|
1064
|
+
|
|
1065
|
+
Parameters
|
|
1066
|
+
----------
|
|
1067
|
+
errors : list[InitErrorDetails]
|
|
1068
|
+
The list of errors to append to.
|
|
1069
|
+
loc : tuple
|
|
1070
|
+
The location of the error in the schema.
|
|
1071
|
+
input_value : Any
|
|
1072
|
+
The input value that caused the error.
|
|
1073
|
+
error_message : str
|
|
1074
|
+
The error message to include in the context.
|
|
1075
|
+
"""
|
|
1076
|
+
errors.append(
|
|
1077
|
+
{
|
|
1078
|
+
"type": error_type,
|
|
1079
|
+
"loc": loc,
|
|
1080
|
+
"input": input_value,
|
|
1081
|
+
"ctx": {"error": error_message},
|
|
1082
|
+
}
|
|
1083
|
+
)
|
|
1084
|
+
|
|
1085
|
+
|
|
1025
1086
|
class Schema(BaseObject, Generic[T]):
|
|
1026
1087
|
"""Database schema model.
|
|
1027
1088
|
|
|
@@ -1204,18 +1265,19 @@ class Schema(BaseObject, Generic[T]):
|
|
|
1204
1265
|
|
|
1205
1266
|
return self
|
|
1206
1267
|
|
|
1207
|
-
|
|
1268
|
+
@model_validator(mode="after")
|
|
1269
|
+
def create_id_map(self: Schema) -> Schema:
|
|
1208
1270
|
"""Create a map of IDs to objects.
|
|
1209
1271
|
|
|
1272
|
+
Returns
|
|
1273
|
+
-------
|
|
1274
|
+
`Schema`
|
|
1275
|
+
The schema with the ID map created.
|
|
1276
|
+
|
|
1210
1277
|
Raises
|
|
1211
1278
|
------
|
|
1212
1279
|
ValueError
|
|
1213
1280
|
Raised if duplicate identifiers are found in the schema.
|
|
1214
|
-
|
|
1215
|
-
Notes
|
|
1216
|
-
-----
|
|
1217
|
-
This is called automatically by the `model_post_init` method. If the
|
|
1218
|
-
ID map is already populated, this method will return immediately.
|
|
1219
1281
|
"""
|
|
1220
1282
|
if self._id_map:
|
|
1221
1283
|
logger.debug("Ignoring call to create_id_map() - ID map was already populated")
|
|
@@ -1226,25 +1288,152 @@ class Schema(BaseObject, Generic[T]):
|
|
|
1226
1288
|
raise ValueError(
|
|
1227
1289
|
"Duplicate IDs found in schema:\n " + "\n ".join(visitor.duplicates) + "\n"
|
|
1228
1290
|
)
|
|
1291
|
+
logger.debug("Created ID map with %d entries", len(self._id_map))
|
|
1229
1292
|
return self
|
|
1230
1293
|
|
|
1231
|
-
def
|
|
1232
|
-
|
|
1294
|
+
def _validate_column_id(
|
|
1295
|
+
self: Schema,
|
|
1296
|
+
column_id: str,
|
|
1297
|
+
loc: tuple,
|
|
1298
|
+
errors: list[InitErrorDetails],
|
|
1299
|
+
) -> None:
|
|
1300
|
+
"""Validate a column ID from a constraint and append errors if invalid.
|
|
1233
1301
|
|
|
1234
1302
|
Parameters
|
|
1235
1303
|
----------
|
|
1236
|
-
|
|
1237
|
-
The
|
|
1304
|
+
schema : Schema
|
|
1305
|
+
The schema being validated.
|
|
1306
|
+
column_id : str
|
|
1307
|
+
The column ID to validate.
|
|
1308
|
+
loc : tuple
|
|
1309
|
+
The location of the error in the schema.
|
|
1310
|
+
errors : list[InitErrorDetails]
|
|
1311
|
+
The list of errors to append to.
|
|
1312
|
+
"""
|
|
1313
|
+
if column_id not in self:
|
|
1314
|
+
_append_error(
|
|
1315
|
+
errors,
|
|
1316
|
+
loc,
|
|
1317
|
+
column_id,
|
|
1318
|
+
f"Column ID '{column_id}' not found in schema",
|
|
1319
|
+
)
|
|
1320
|
+
elif not isinstance(self[column_id], Column):
|
|
1321
|
+
_append_error(
|
|
1322
|
+
errors,
|
|
1323
|
+
loc,
|
|
1324
|
+
column_id,
|
|
1325
|
+
f"ID '{column_id}' does not refer to a Column object",
|
|
1326
|
+
)
|
|
1238
1327
|
|
|
1239
|
-
|
|
1240
|
-
|
|
1241
|
-
|
|
1242
|
-
|
|
1328
|
+
def _validate_foreign_key_column(
|
|
1329
|
+
self: Schema,
|
|
1330
|
+
column_id: str,
|
|
1331
|
+
table: Table,
|
|
1332
|
+
loc: tuple,
|
|
1333
|
+
errors: list[InitErrorDetails],
|
|
1334
|
+
) -> None:
|
|
1335
|
+
"""Validate a foreign key column ID from a constraint and append errors
|
|
1336
|
+
if invalid.
|
|
1243
1337
|
|
|
1244
|
-
|
|
1245
|
-
|
|
1338
|
+
Parameters
|
|
1339
|
+
----------
|
|
1340
|
+
schema : Schema
|
|
1341
|
+
The schema being validated.
|
|
1342
|
+
column_id : str
|
|
1343
|
+
The foreign key column ID to validate.
|
|
1344
|
+
loc : tuple
|
|
1345
|
+
The location of the error in the schema.
|
|
1346
|
+
errors : list[InitErrorDetails]
|
|
1347
|
+
The list of errors to append to.
|
|
1246
1348
|
"""
|
|
1247
|
-
|
|
1349
|
+
try:
|
|
1350
|
+
table._find_column_by_id(column_id)
|
|
1351
|
+
except KeyError:
|
|
1352
|
+
_append_error(
|
|
1353
|
+
errors,
|
|
1354
|
+
loc,
|
|
1355
|
+
column_id,
|
|
1356
|
+
f"Column '{column_id}' not found in table '{table.name}'",
|
|
1357
|
+
)
|
|
1358
|
+
|
|
1359
|
+
@model_validator(mode="after")
|
|
1360
|
+
def check_constraints(self: Schema) -> Schema:
|
|
1361
|
+
"""Check constraint objects for validity. This needs to be deferred
|
|
1362
|
+
until after the schema is fully loaded and the ID map is created.
|
|
1363
|
+
|
|
1364
|
+
Raises
|
|
1365
|
+
------
|
|
1366
|
+
pydantic.ValidationError
|
|
1367
|
+
Raised if any constraints are invalid.
|
|
1368
|
+
|
|
1369
|
+
Returns
|
|
1370
|
+
-------
|
|
1371
|
+
`Schema`
|
|
1372
|
+
The schema being validated.
|
|
1373
|
+
"""
|
|
1374
|
+
errors: list[InitErrorDetails] = []
|
|
1375
|
+
|
|
1376
|
+
for table_index, table in enumerate(self.tables):
|
|
1377
|
+
for constraint_index, constraint in enumerate(table.constraints):
|
|
1378
|
+
column_ids: list[str] = []
|
|
1379
|
+
referenced_column_ids: list[str] = []
|
|
1380
|
+
|
|
1381
|
+
if isinstance(constraint, ForeignKeyConstraint):
|
|
1382
|
+
column_ids += constraint.columns
|
|
1383
|
+
referenced_column_ids += constraint.referenced_columns
|
|
1384
|
+
elif isinstance(constraint, UniqueConstraint):
|
|
1385
|
+
column_ids += constraint.columns
|
|
1386
|
+
# No extra checks are required on CheckConstraint objects.
|
|
1387
|
+
|
|
1388
|
+
# Validate the foreign key columns
|
|
1389
|
+
for column_id in column_ids:
|
|
1390
|
+
self._validate_column_id(
|
|
1391
|
+
column_id,
|
|
1392
|
+
(
|
|
1393
|
+
"tables",
|
|
1394
|
+
table_index,
|
|
1395
|
+
"constraints",
|
|
1396
|
+
constraint_index,
|
|
1397
|
+
"columns",
|
|
1398
|
+
column_id,
|
|
1399
|
+
),
|
|
1400
|
+
errors,
|
|
1401
|
+
)
|
|
1402
|
+
# Check that the foreign key column is within the source
|
|
1403
|
+
# table.
|
|
1404
|
+
self._validate_foreign_key_column(
|
|
1405
|
+
column_id,
|
|
1406
|
+
table,
|
|
1407
|
+
(
|
|
1408
|
+
"tables",
|
|
1409
|
+
table_index,
|
|
1410
|
+
"constraints",
|
|
1411
|
+
constraint_index,
|
|
1412
|
+
"columns",
|
|
1413
|
+
column_id,
|
|
1414
|
+
),
|
|
1415
|
+
errors,
|
|
1416
|
+
)
|
|
1417
|
+
|
|
1418
|
+
# Validate the primary key (reference) columns
|
|
1419
|
+
for referenced_column_id in referenced_column_ids:
|
|
1420
|
+
self._validate_column_id(
|
|
1421
|
+
referenced_column_id,
|
|
1422
|
+
(
|
|
1423
|
+
"tables",
|
|
1424
|
+
table_index,
|
|
1425
|
+
"constraints",
|
|
1426
|
+
constraint_index,
|
|
1427
|
+
"referenced_columns",
|
|
1428
|
+
referenced_column_id,
|
|
1429
|
+
),
|
|
1430
|
+
errors,
|
|
1431
|
+
)
|
|
1432
|
+
|
|
1433
|
+
if errors:
|
|
1434
|
+
raise ValidationError.from_exception_data("Schema validation failed", errors)
|
|
1435
|
+
|
|
1436
|
+
return self
|
|
1248
1437
|
|
|
1249
1438
|
def __getitem__(self, id: str) -> BaseObject:
|
|
1250
1439
|
"""Get an object by its ID.
|
|
@@ -1388,31 +1577,56 @@ class Schema(BaseObject, Generic[T]):
|
|
|
1388
1577
|
yaml_data = yaml.safe_load(source)
|
|
1389
1578
|
return Schema.model_validate(yaml_data, context=context)
|
|
1390
1579
|
|
|
1391
|
-
def
|
|
1580
|
+
def _model_dump(self, strip_ids: bool = False) -> dict[str, Any]:
|
|
1581
|
+
"""Dump the schema as a dictionary with some default arguments
|
|
1582
|
+
applied.
|
|
1583
|
+
|
|
1584
|
+
Parameters
|
|
1585
|
+
----------
|
|
1586
|
+
strip_ids
|
|
1587
|
+
Whether to strip the IDs from the dumped data. Defaults to `False`.
|
|
1588
|
+
|
|
1589
|
+
Returns
|
|
1590
|
+
-------
|
|
1591
|
+
`dict` [ `str`, `Any` ]
|
|
1592
|
+
The dumped schema data as a dictionary.
|
|
1593
|
+
"""
|
|
1594
|
+
data = self.model_dump(by_alias=True, exclude_none=True, exclude_defaults=True)
|
|
1595
|
+
if strip_ids:
|
|
1596
|
+
data = _strip_ids(data)
|
|
1597
|
+
return data
|
|
1598
|
+
|
|
1599
|
+
def dump_yaml(self, stream: IO[str] = sys.stdout, strip_ids: bool = False) -> None:
|
|
1392
1600
|
"""Pretty print the schema as YAML.
|
|
1393
1601
|
|
|
1394
1602
|
Parameters
|
|
1395
1603
|
----------
|
|
1396
1604
|
stream
|
|
1397
1605
|
The stream to write the YAML data to.
|
|
1606
|
+
strip_ids
|
|
1607
|
+
Whether to strip the IDs from the dumped data. Defaults to `False`.
|
|
1398
1608
|
"""
|
|
1609
|
+
data = self._model_dump(strip_ids=strip_ids)
|
|
1399
1610
|
yaml.safe_dump(
|
|
1400
|
-
|
|
1611
|
+
data,
|
|
1401
1612
|
stream,
|
|
1402
1613
|
default_flow_style=False,
|
|
1403
1614
|
sort_keys=False,
|
|
1404
1615
|
)
|
|
1405
1616
|
|
|
1406
|
-
def dump_json(self, stream: IO[str] = sys.stdout) -> None:
|
|
1617
|
+
def dump_json(self, stream: IO[str] = sys.stdout, strip_ids: bool = False) -> None:
|
|
1407
1618
|
"""Pretty print the schema as JSON.
|
|
1408
1619
|
|
|
1409
1620
|
Parameters
|
|
1410
1621
|
----------
|
|
1411
1622
|
stream
|
|
1412
1623
|
The stream to write the JSON data to.
|
|
1624
|
+
strip_ids
|
|
1625
|
+
Whether to strip the IDs from the dumped data. Defaults to `False`.
|
|
1413
1626
|
"""
|
|
1627
|
+
data = self._model_dump(strip_ids=strip_ids)
|
|
1414
1628
|
json.dump(
|
|
1415
|
-
|
|
1629
|
+
data,
|
|
1416
1630
|
stream,
|
|
1417
1631
|
indent=4,
|
|
1418
1632
|
sort_keys=False,
|
felis/metadata.py
CHANGED
|
@@ -338,12 +338,17 @@ class MetaDataBuilder:
|
|
|
338
338
|
"deferrable": constraint_obj.deferrable or None,
|
|
339
339
|
"initially": constraint_obj.initially or None,
|
|
340
340
|
}
|
|
341
|
+
|
|
341
342
|
constraint: Constraint
|
|
342
343
|
|
|
343
344
|
if isinstance(constraint_obj, datamodel.ForeignKeyConstraint):
|
|
344
345
|
fk_obj: datamodel.ForeignKeyConstraint = constraint_obj
|
|
345
346
|
columns = [self._objects[column_id] for column_id in fk_obj.columns]
|
|
346
347
|
refcolumns = [self._objects[column_id] for column_id in fk_obj.referenced_columns]
|
|
348
|
+
if constraint_obj.on_delete is not None:
|
|
349
|
+
args["ondelete"] = constraint_obj.on_delete
|
|
350
|
+
if constraint_obj.on_update is not None:
|
|
351
|
+
args["onupdate"] = constraint_obj.on_update
|
|
347
352
|
constraint = ForeignKeyConstraint(columns, refcolumns, **args)
|
|
348
353
|
elif isinstance(constraint_obj, datamodel.CheckConstraint):
|
|
349
354
|
check_obj: datamodel.CheckConstraint = constraint_obj
|
felis/tap_schema.py
CHANGED
|
@@ -21,6 +21,8 @@
|
|
|
21
21
|
# You should have received a copy of the GNU General Public License
|
|
22
22
|
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
|
23
23
|
|
|
24
|
+
import csv
|
|
25
|
+
import io
|
|
24
26
|
import logging
|
|
25
27
|
import os
|
|
26
28
|
import re
|
|
@@ -208,7 +210,7 @@ class TableManager:
|
|
|
208
210
|
str
|
|
209
211
|
The path to the standard TAP_SCHEMA schema resource.
|
|
210
212
|
"""
|
|
211
|
-
return os.path.join(os.path.dirname(__file__), "
|
|
213
|
+
return os.path.join(os.path.dirname(__file__), "config", "tap_schema", "tap_schema_std.yaml")
|
|
212
214
|
|
|
213
215
|
@classmethod
|
|
214
216
|
def get_tap_schema_std_resource(cls) -> ResourcePath:
|
|
@@ -219,7 +221,7 @@ class TableManager:
|
|
|
219
221
|
`~lsst.resources.ResourcePath`
|
|
220
222
|
The standard TAP_SCHEMA schema resource.
|
|
221
223
|
"""
|
|
222
|
-
return ResourcePath("resource://felis/
|
|
224
|
+
return ResourcePath("resource://felis/config/tap_schema/tap_schema_std.yaml")
|
|
223
225
|
|
|
224
226
|
@classmethod
|
|
225
227
|
def get_table_names_std(cls) -> list[str]:
|
|
@@ -708,3 +710,48 @@ class DataLoader:
|
|
|
708
710
|
if index.columns and len(index.columns) == 1 and index.columns[0] == column.id:
|
|
709
711
|
return 1
|
|
710
712
|
return 0
|
|
713
|
+
|
|
714
|
+
|
|
715
|
+
class MetadataInserter:
|
|
716
|
+
"""Insert TAP_SCHEMA self-description rows into the database.
|
|
717
|
+
|
|
718
|
+
Parameters
|
|
719
|
+
----------
|
|
720
|
+
mgr
|
|
721
|
+
The table manager that contains the TAP_SCHEMA tables.
|
|
722
|
+
engine
|
|
723
|
+
The engine for connecting to the TAP_SCHEMA database.
|
|
724
|
+
"""
|
|
725
|
+
|
|
726
|
+
def __init__(self, mgr: TableManager, engine: Engine):
|
|
727
|
+
"""Initialize the metadata inserter.
|
|
728
|
+
|
|
729
|
+
Parameters
|
|
730
|
+
----------
|
|
731
|
+
mgr
|
|
732
|
+
The table manager representing the TAP_SCHEMA tables.
|
|
733
|
+
engine
|
|
734
|
+
The SQLAlchemy engine for connecting to the database.
|
|
735
|
+
"""
|
|
736
|
+
self._mgr = mgr
|
|
737
|
+
self._engine = engine
|
|
738
|
+
|
|
739
|
+
def insert_metadata(self) -> None:
|
|
740
|
+
"""Insert the TAP_SCHEMA metadata into the database."""
|
|
741
|
+
for table_name in self._mgr.get_table_names_std():
|
|
742
|
+
table = self._mgr[table_name]
|
|
743
|
+
csv_bytes = ResourcePath(f"resource://felis/config/tap_schema/{table_name}.csv").read()
|
|
744
|
+
text_stream = io.TextIOWrapper(io.BytesIO(csv_bytes), encoding="utf-8")
|
|
745
|
+
reader = csv.reader(text_stream)
|
|
746
|
+
headers = next(reader)
|
|
747
|
+
rows = [
|
|
748
|
+
{key: None if value == "\\N" else value for key, value in zip(headers, row)} for row in reader
|
|
749
|
+
]
|
|
750
|
+
logger.debug(
|
|
751
|
+
"Inserting %d rows into table '%s' with headers: %s",
|
|
752
|
+
len(rows),
|
|
753
|
+
table_name,
|
|
754
|
+
headers,
|
|
755
|
+
)
|
|
756
|
+
with self._engine.begin() as conn:
|
|
757
|
+
conn.execute(table.insert(), rows)
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.4
|
|
2
2
|
Name: lsst-felis
|
|
3
|
-
Version: 29.2025.
|
|
3
|
+
Version: 29.2025.2600
|
|
4
4
|
Summary: A vocabulary for describing catalogs and acting on those descriptions
|
|
5
5
|
Author-email: Rubin Observatory Data Management <dm-admin@lists.lsst.org>
|
|
6
6
|
License: GNU General Public License v3 or later (GPLv3+)
|
|
@@ -0,0 +1,31 @@
|
|
|
1
|
+
felis/__init__.py,sha256=HnwWzLaPOSnPzAoppSIHzTrGfixEgvkzJdBxa8-03cw,1294
|
|
2
|
+
felis/cli.py,sha256=g6OrBrIylNLiflSvrLlef86BjoiehV3L5eAvVPrxPog,16911
|
|
3
|
+
felis/datamodel.py,sha256=HKg4Ut0qPX7sV6q-Mw9U3BKgjVQFAnhhAUmo9Woh7v8,51228
|
|
4
|
+
felis/diff.py,sha256=z4ZzUocFYVa2y22BWUAMkeeLORmMtaWIDGTVaUE1OIM,7181
|
|
5
|
+
felis/metadata.py,sha256=79YcaIqeFP-pj9zhWpqXlvw_piUTUwuLrV5_8eVYalQ,13763
|
|
6
|
+
felis/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
7
|
+
felis/tap_schema.py,sha256=uu2imuzxyuIUbmW4liC6_h4TW5MW2t4SI9rjtMxcyfI,26372
|
|
8
|
+
felis/types.py,sha256=ifZQjc-Uw5CM3L7hmFUb7wcHY1O_HgJCw6HPqyUkHvk,5510
|
|
9
|
+
felis/config/tap_schema/columns.csv,sha256=9RsyuPObUQ_6myux9vKtlQ-aJgs7rvvxoLf6yYSRWqc,3272
|
|
10
|
+
felis/config/tap_schema/key_columns.csv,sha256=dRezco5ltcM1mG--2DvPsbOxB6cwVaBwczwi3It2vag,210
|
|
11
|
+
felis/config/tap_schema/keys.csv,sha256=6zTXyo-1GNfu5sBWpX-7ZJFAtHrxOys78AViCcdPgu8,377
|
|
12
|
+
felis/config/tap_schema/schemas.csv,sha256=z5g1bW1Y9H8nKLZyH4e5xiBBoK9JezR2Xf8L79K2TZk,138
|
|
13
|
+
felis/config/tap_schema/tables.csv,sha256=o0KioOiL7hw9ntCyKWili-iFMjAaGRMUOE-nM30LBD0,510
|
|
14
|
+
felis/config/tap_schema/tap_schema_std.yaml,sha256=sPW-Vk72nY0PFpCvP5d8L8fWvhkif-x32sGtcfDZ8bU,7131
|
|
15
|
+
felis/db/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
16
|
+
felis/db/dialects.py,sha256=XTZEbTnfy96GJDNRLCQMbAV6irerC87vhO_HyTIXLbs,3517
|
|
17
|
+
felis/db/schema.py,sha256=NOFXzBoBQcgpoRlgT3LoC70FKp7pCSmFEJ7rU8FIT-c,2101
|
|
18
|
+
felis/db/sqltypes.py,sha256=Q2p3Af3O5-B1ZxQ4M2j_w8SH1o_kp6ezg8h7LmSlfww,11060
|
|
19
|
+
felis/db/utils.py,sha256=jiKQ_SirKRdQITHe8gSiT_i3ckRHZbkAnwUlEHk2u4Y,14116
|
|
20
|
+
felis/db/variants.py,sha256=eahthrbVeV8ZdGamWQccNmWgx6CCscGrU0vQRs5HZK8,5260
|
|
21
|
+
felis/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
22
|
+
felis/tests/postgresql.py,sha256=B_xk4fLual5-viGDqP20r94okuc0pbSvytRH_L0fvMs,4035
|
|
23
|
+
felis/tests/run_cli.py,sha256=Gg8loUIGj9t6KlkRKrEc9Z9b5dtlkpJy94ORuj4BrxU,2503
|
|
24
|
+
lsst_felis-29.2025.2600.dist-info/licenses/COPYRIGHT,sha256=vJAFLFTSF1mhy9eIuA3P6R-3yxTWKQgpig88P-1IzRw,129
|
|
25
|
+
lsst_felis-29.2025.2600.dist-info/licenses/LICENSE,sha256=jOtLnuWt7d5Hsx6XXB2QxzrSe2sWWh3NgMfFRetluQM,35147
|
|
26
|
+
lsst_felis-29.2025.2600.dist-info/METADATA,sha256=olXZcNPImhvS5QjMr8uoEyq_Br5GjLRaBjE4OsujeJM,1433
|
|
27
|
+
lsst_felis-29.2025.2600.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
|
|
28
|
+
lsst_felis-29.2025.2600.dist-info/entry_points.txt,sha256=Gk2XFujA_Gp52VBk45g5kim8TDoMDJFPctsMqiq72EM,40
|
|
29
|
+
lsst_felis-29.2025.2600.dist-info/top_level.txt,sha256=F4SvPip3iZRVyISi50CHhwTIAokAhSxjWiVcn4IVWRI,6
|
|
30
|
+
lsst_felis-29.2025.2600.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
|
|
31
|
+
lsst_felis-29.2025.2600.dist-info/RECORD,,
|
|
@@ -1,26 +0,0 @@
|
|
|
1
|
-
felis/__init__.py,sha256=HnwWzLaPOSnPzAoppSIHzTrGfixEgvkzJdBxa8-03cw,1294
|
|
2
|
-
felis/cli.py,sha256=jx3yBMEFPgvZTJsTKmWMAx--vBuRBDIOJNBbxMZR6fY,16132
|
|
3
|
-
felis/datamodel.py,sha256=NYTmxuuU6DqL3r2e137sN9xS_WmD_xYeEYKAQjVA6Qs,44245
|
|
4
|
-
felis/diff.py,sha256=z4ZzUocFYVa2y22BWUAMkeeLORmMtaWIDGTVaUE1OIM,7181
|
|
5
|
-
felis/metadata.py,sha256=cYx_qizkLBqcoxWV46h4TbwTi1KVJAkuA2OuUmD-K5k,13536
|
|
6
|
-
felis/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
7
|
-
felis/tap_schema.py,sha256=vO_f1LvFPU6D1vv2SplIzOW_TV36zOOFOfXQIdD0rMI,24781
|
|
8
|
-
felis/types.py,sha256=ifZQjc-Uw5CM3L7hmFUb7wcHY1O_HgJCw6HPqyUkHvk,5510
|
|
9
|
-
felis/db/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
10
|
-
felis/db/dialects.py,sha256=XTZEbTnfy96GJDNRLCQMbAV6irerC87vhO_HyTIXLbs,3517
|
|
11
|
-
felis/db/schema.py,sha256=NOFXzBoBQcgpoRlgT3LoC70FKp7pCSmFEJ7rU8FIT-c,2101
|
|
12
|
-
felis/db/sqltypes.py,sha256=Q2p3Af3O5-B1ZxQ4M2j_w8SH1o_kp6ezg8h7LmSlfww,11060
|
|
13
|
-
felis/db/utils.py,sha256=jiKQ_SirKRdQITHe8gSiT_i3ckRHZbkAnwUlEHk2u4Y,14116
|
|
14
|
-
felis/db/variants.py,sha256=eahthrbVeV8ZdGamWQccNmWgx6CCscGrU0vQRs5HZK8,5260
|
|
15
|
-
felis/schemas/tap_schema_std.yaml,sha256=sPW-Vk72nY0PFpCvP5d8L8fWvhkif-x32sGtcfDZ8bU,7131
|
|
16
|
-
felis/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
17
|
-
felis/tests/postgresql.py,sha256=B_xk4fLual5-viGDqP20r94okuc0pbSvytRH_L0fvMs,4035
|
|
18
|
-
felis/tests/run_cli.py,sha256=Gg8loUIGj9t6KlkRKrEc9Z9b5dtlkpJy94ORuj4BrxU,2503
|
|
19
|
-
lsst_felis-29.2025.2400.dist-info/licenses/COPYRIGHT,sha256=vJAFLFTSF1mhy9eIuA3P6R-3yxTWKQgpig88P-1IzRw,129
|
|
20
|
-
lsst_felis-29.2025.2400.dist-info/licenses/LICENSE,sha256=jOtLnuWt7d5Hsx6XXB2QxzrSe2sWWh3NgMfFRetluQM,35147
|
|
21
|
-
lsst_felis-29.2025.2400.dist-info/METADATA,sha256=hUHhp37DTyM5vAbFwW5IzcIyghOTQgEDyK2H6u1PoAs,1433
|
|
22
|
-
lsst_felis-29.2025.2400.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
|
|
23
|
-
lsst_felis-29.2025.2400.dist-info/entry_points.txt,sha256=Gk2XFujA_Gp52VBk45g5kim8TDoMDJFPctsMqiq72EM,40
|
|
24
|
-
lsst_felis-29.2025.2400.dist-info/top_level.txt,sha256=F4SvPip3iZRVyISi50CHhwTIAokAhSxjWiVcn4IVWRI,6
|
|
25
|
-
lsst_felis-29.2025.2400.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
|
|
26
|
-
lsst_felis-29.2025.2400.dist-info/RECORD,,
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|