lsst-felis 26.2024.900-py3-none-any.whl → 29.2025.4500-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (39)
  1. felis/__init__.py +10 -24
  2. felis/cli.py +437 -341
  3. felis/config/tap_schema/columns.csv +33 -0
  4. felis/config/tap_schema/key_columns.csv +8 -0
  5. felis/config/tap_schema/keys.csv +8 -0
  6. felis/config/tap_schema/schemas.csv +2 -0
  7. felis/config/tap_schema/tables.csv +6 -0
  8. felis/config/tap_schema/tap_schema_std.yaml +273 -0
  9. felis/datamodel.py +1386 -193
  10. felis/db/dialects.py +116 -0
  11. felis/db/schema.py +62 -0
  12. felis/db/sqltypes.py +275 -48
  13. felis/db/utils.py +409 -0
  14. felis/db/variants.py +159 -0
  15. felis/diff.py +234 -0
  16. felis/metadata.py +385 -0
  17. felis/tap_schema.py +767 -0
  18. felis/tests/__init__.py +0 -0
  19. felis/tests/postgresql.py +134 -0
  20. felis/tests/run_cli.py +79 -0
  21. felis/types.py +57 -9
  22. lsst_felis-29.2025.4500.dist-info/METADATA +38 -0
  23. lsst_felis-29.2025.4500.dist-info/RECORD +31 -0
  24. {lsst_felis-26.2024.900.dist-info → lsst_felis-29.2025.4500.dist-info}/WHEEL +1 -1
  25. {lsst_felis-26.2024.900.dist-info → lsst_felis-29.2025.4500.dist-info/licenses}/COPYRIGHT +1 -1
  26. felis/check.py +0 -381
  27. felis/simple.py +0 -424
  28. felis/sql.py +0 -275
  29. felis/tap.py +0 -433
  30. felis/utils.py +0 -100
  31. felis/validation.py +0 -103
  32. felis/version.py +0 -2
  33. felis/visitor.py +0 -180
  34. lsst_felis-26.2024.900.dist-info/METADATA +0 -28
  35. lsst_felis-26.2024.900.dist-info/RECORD +0 -23
  36. {lsst_felis-26.2024.900.dist-info → lsst_felis-29.2025.4500.dist-info}/entry_points.txt +0 -0
  37. {lsst_felis-26.2024.900.dist-info → lsst_felis-29.2025.4500.dist-info/licenses}/LICENSE +0 -0
  38. {lsst_felis-26.2024.900.dist-info → lsst_felis-29.2025.4500.dist-info}/top_level.txt +0 -0
  39. {lsst_felis-26.2024.900.dist-info → lsst_felis-29.2025.4500.dist-info}/zip-safe +0 -0
felis/diff.py ADDED
@@ -0,0 +1,234 @@
+ """Compare schemas and print the differences."""
+
+ # This file is part of felis.
+ #
+ # Developed for the LSST Data Management System.
+ # This product includes software developed by the LSST Project
+ # (https://www.lsst.org).
+ # See the COPYRIGHT file at the top-level directory of this distribution
+ # for details of code ownership.
+ #
+ # This program is free software: you can redistribute it and/or modify
+ # it under the terms of the GNU General Public License as published by
+ # the Free Software Foundation, either version 3 of the License, or
+ # (at your option) any later version.
+ #
+ # This program is distributed in the hope that it will be useful,
+ # but WITHOUT ANY WARRANTY; without even the implied warranty of
+ # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ # GNU General Public License for more details.
+ #
+ # You should have received a copy of the GNU General Public License
+ # along with this program. If not, see <https://www.gnu.org/licenses/>.
+
+ import logging
+ import pprint
+ import re
+ from collections.abc import Callable
+ from typing import Any
+
+ from alembic.autogenerate import compare_metadata
+ from alembic.migration import MigrationContext
+ from deepdiff.diff import DeepDiff
+ from sqlalchemy import Engine, MetaData
+
+ from .datamodel import Schema
+ from .metadata import MetaDataBuilder
+
+ __all__ = ["DatabaseDiff", "SchemaDiff"]
+
+ logger = logging.getLogger(__name__)
+
+ # Change alembic log level to avoid unnecessary output
+ logging.getLogger("alembic").setLevel(logging.WARNING)
+
+
+ class SchemaDiff:
+     """
+     Compare two schemas using DeepDiff and print the differences.
+
+     Parameters
+     ----------
+     schema1
+         The first schema to compare.
+     schema2
+         The second schema to compare.
+     """
+
+     def __init__(self, schema1: Schema, schema2: Schema):
+         self.dict1 = schema1.model_dump(exclude_none=True)
+         self.dict2 = schema2.model_dump(exclude_none=True)
+         self.diff = DeepDiff(self.dict1, self.dict2, ignore_order=True)
+
+     def print(self) -> None:
+         """
+         Print the differences between the two schemas.
+         """
+         pprint.pprint(self.diff)
+
+     @property
+     def has_changes(self) -> bool:
+         """
+         Check if there are any differences between the two schemas.
+
+         Returns
+         -------
+         bool
+             True if there are differences, False otherwise.
+         """
+         return len(self.diff) > 0
+
+
+ class FormattedSchemaDiff(SchemaDiff):
+     """
+     Compare two schemas using DeepDiff and print the differences using a
+     customized output format.
+
+     Parameters
+     ----------
+     schema1
+         The first schema to compare.
+     schema2
+         The second schema to compare.
+     """
+
+     def __init__(self, schema1: Schema, schema2: Schema):
+         super().__init__(schema1, schema2)
+
+     def print(self) -> None:
+         """
+         Print the differences between the two schemas using a custom format.
+         """
+         handlers: dict[str, Callable[[dict[str, Any]], None]] = {
+             "values_changed": self._handle_values_changed,
+             "iterable_item_added": self._handle_iterable_item_added,
+             "iterable_item_removed": self._handle_iterable_item_removed,
+             "dictionary_item_added": self._handle_dictionary_item_added,
+             "dictionary_item_removed": self._handle_dictionary_item_removed,
+         }
+
+         for change_type, handler in handlers.items():
+             if change_type in self.diff:
+                 handler(self.diff[change_type])
+
+     def _print_header(self, id_dict: dict[str, Any], keys: list[int | str]) -> None:
+         # id = self._get_id(id_dict, keys)
+         # Don't display ID here for now; it is always just the schema ID.
+         print(f"{self._get_key_display(keys)}")
+         # print(f"{id} @ {self._get_key_display(keys)}")
+
+     def _handle_values_changed(self, changes: dict[str, Any]) -> None:
+         for key in changes:
+             keys = self._parse_deepdiff_path(key)
+             value1 = changes[key]["old_value"]
+             value2 = changes[key]["new_value"]
+             self._print_header(self.dict1, keys)
+             print(f"- {value1}")
+             print(f"+ {value2}")
+
+     def _handle_iterable_item_added(self, changes: dict[str, Any]) -> None:
+         for key in changes:
+             keys = self._parse_deepdiff_path(key)
+             value = changes[key]
+             self._print_header(self.dict2, keys)
+             print(f"+ {value}")
+
+     def _handle_iterable_item_removed(self, changes: dict[str, Any]) -> None:
+         for key in changes:
+             keys = self._parse_deepdiff_path(key)
+             value = changes[key]
+             self._print_header(self.dict1, keys)
+             print(f"- {value}")
+
+     def _handle_dictionary_item_added(self, changes: dict[str, Any]) -> None:
+         for key in changes:
+             keys = self._parse_deepdiff_path(key)
+             value = keys[-1]
+             keys.pop()
+             self._print_header(self.dict2, keys)
+             print(f"+ {value}")
+
+     def _handle_dictionary_item_removed(self, changes: dict[str, Any]) -> None:
+         for key in changes:
+             keys = self._parse_deepdiff_path(key)
+             value = keys[-1]
+             keys.pop()
+             self._print_header(self.dict1, keys)
+             print(f"- {value}")
+
+     @staticmethod
+     def _get_id(values: dict, keys: list[str | int]) -> str:
+         # Unused for now, pending updates to diff tool in DM-49446.
+         value: list | dict = values
+         last_id = None
+
+         for key in keys:
+             logger.debug(f"Processing key <{key}> with type {type(key)}")
+             logger.debug(f"Type of value: {type(value)}")
+             if isinstance(value, dict) and "id" in value:
+                 last_id = value["id"]
+             elif isinstance(value, list) and isinstance(key, int):
+                 if not (0 <= key < len(value)):
+                     raise ValueError(f"Index '{key}' is out of range for list of length {len(value)}")
+             value = value[key]
+
+         if isinstance(value, dict) and "id" in value:
+             last_id = value["id"]
+
+         if last_id is not None:
+             return last_id
+         else:
+             raise ValueError("No 'id' found in the specified path")
+
+     @staticmethod
+     def _get_key_display(keys: list[str | int]) -> str:
+         return ".".join(str(k) for k in keys)
+
+     @staticmethod
+     def _parse_deepdiff_path(path: str) -> list[str | int]:
+         if path.startswith("root"):
+             path = path[4:]
+
+         pattern = re.compile(r"\['([^']+)'\]|\[(\d+)\]")
+         matches = pattern.findall(path)
+
+         keys = []
+         for match in matches:
+             if match[0]:  # String key
+                 keys.append(match[0])
+             elif match[1]:  # Integer index
+                 keys.append(int(match[1]))
+
+         return keys
+
+
+ class DatabaseDiff(SchemaDiff):
+     """
+     Compare a schema with a database and print the differences.
+
+     Parameters
+     ----------
+     schema
+         The schema to compare.
+     engine
+         The database engine to compare with.
+     """
+
+     def __init__(self, schema: Schema, engine: Engine):
+         db_metadata = MetaData()
+         with engine.connect() as connection:
+             db_metadata.reflect(bind=connection)
+             mc = MigrationContext.configure(
+                 connection, opts={"compare_type": True, "target_metadata": db_metadata}
+             )
+             schema_metadata = MetaDataBuilder(schema, apply_schema_to_metadata=False).build()
+             self.diff = compare_metadata(mc, schema_metadata)
+
+     def print(self) -> None:
+         """
+         Print the differences between the schema and the database.
+         """
+         if self.has_changes:
+             pprint.pprint(self.diff)
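Usage sketch (not taken from the package): the new diff classes compare either two Felis schemas or a schema against a live database. The example below assumes a schema YAML file can be loaded into felis.datamodel.Schema via Pydantic's model_validate (the classes above rely on model_dump, so a Pydantic model is implied); the file names and database URL are placeholders.

    import yaml
    from sqlalchemy import create_engine

    from felis.datamodel import Schema
    from felis.diff import DatabaseDiff, SchemaDiff

    # Assumed: these YAML files contain valid Felis schemas.
    with open("schema_v1.yaml") as f1, open("schema_v2.yaml") as f2:
        schema1 = Schema.model_validate(yaml.safe_load(f1))
        schema2 = Schema.model_validate(yaml.safe_load(f2))

    # Schema-to-schema comparison (DeepDiff under the hood).
    schema_diff = SchemaDiff(schema1, schema2)
    if schema_diff.has_changes:
        schema_diff.print()

    # Schema-to-database comparison (Alembic's compare_metadata under the hood).
    db_diff = DatabaseDiff(schema1, create_engine("sqlite:///example.db"))
    if db_diff.has_changes:
        db_diff.print()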
felis/metadata.py ADDED
@@ -0,0 +1,385 @@
+ """Build SQLAlchemy metadata from a Felis schema."""
+
+ # This file is part of felis.
+ #
+ # Developed for the LSST Data Management System.
+ # This product includes software developed by the LSST Project
+ # (https://www.lsst.org).
+ # See the COPYRIGHT file at the top-level directory of this distribution
+ # for details of code ownership.
+ #
+ # This program is free software: you can redistribute it and/or modify
+ # it under the terms of the GNU General Public License as published by
+ # the Free Software Foundation, either version 3 of the License, or
+ # (at your option) any later version.
+ #
+ # This program is distributed in the hope that it will be useful,
+ # but WITHOUT ANY WARRANTY; without even the implied warranty of
+ # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ # GNU General Public License for more details.
+ #
+ # You should have received a copy of the GNU General Public License
+ # along with this program. If not, see <https://www.gnu.org/licenses/>.
+
+ from __future__ import annotations
+
+ import logging
+ from typing import Any, Literal
+
+ from lsst.utils.iteration import ensure_iterable
+ from sqlalchemy import (
+     CheckConstraint,
+     Column,
+     Constraint,
+     ForeignKeyConstraint,
+     Index,
+     MetaData,
+     PrimaryKeyConstraint,
+     Table,
+     TextClause,
+     UniqueConstraint,
+     text,
+ )
+ from sqlalchemy.dialects import mysql, postgresql
+ from sqlalchemy.types import TypeEngine
+
+ from felis.datamodel import Schema
+ from felis.db.variants import make_variant_dict
+
+ from . import datamodel
+ from .db import sqltypes
+ from .types import FelisType
+
+ __all__ = ("MetaDataBuilder", "get_datatype_with_variants")
+
+ logger = logging.getLogger(__name__)
+
+
+ def _handle_timestamp_column(column_obj: datamodel.Column, variant_dict: dict[str, TypeEngine[Any]]) -> None:
+     """Handle columns with the timestamp datatype.
+
+     Parameters
+     ----------
+     column_obj
+         The column object representing the timestamp.
+     variant_dict
+         The dictionary of variant overrides for the datatype.
+
+     Notes
+     -----
+     This function updates the variant dictionary with the appropriate
+     timestamp type for the column object but only if the precision is set.
+     Otherwise, the default timestamp objects defined in the Felis type system
+     will be used instead.
+     """
+     if column_obj.precision is not None:
+         args: Any = [False, column_obj.precision]  # Turn off timezone.
+         variant_dict.update({"postgresql": postgresql.TIMESTAMP(*args), "mysql": mysql.DATETIME(*args)})
+
+
+ def get_datatype_with_variants(column_obj: datamodel.Column) -> TypeEngine:
+     """Use the Felis type system to get a SQLAlchemy datatype with variant
+     overrides from the information in a Felis column object.
+
+     Parameters
+     ----------
+     column_obj
+         The column object from which to get the datatype.
+
+     Returns
+     -------
+     `~sqlalchemy.types.TypeEngine`
+         The SQLAlchemy datatype object.
+
+     Raises
+     ------
+     ValueError
+         Raised if the column has a sized type but no length or if the datatype
+         is invalid.
+     """
+     variant_dict = make_variant_dict(column_obj)
+     felis_type = FelisType.felis_type(column_obj.datatype.value)
+     datatype_fun = getattr(sqltypes, column_obj.datatype.value, None)
+     if datatype_fun is None:
+         raise ValueError(f"Unknown datatype: {column_obj.datatype.value}")
+     args = []
+     if felis_type.is_sized:
+         # Add length argument for size types.
+         if not column_obj.length:
+             raise ValueError(f"Column {column_obj.name} has sized type '{column_obj.datatype}' but no length")
+         args = [column_obj.length]
+     if felis_type.is_timestamp:
+         _handle_timestamp_column(column_obj, variant_dict)
+     return datatype_fun(*args, **variant_dict)
+
+
+ _VALID_SERVER_DEFAULTS = ("CURRENT_TIMESTAMP", "NOW()", "LOCALTIMESTAMP", "NULL")
+
+
+ class MetaDataBuilder:
+     """Build a SQLAlchemy metadata object from a Felis schema.
+
+     Parameters
+     ----------
+     schema
+         The schema object from which to build the SQLAlchemy metadata.
+     apply_schema_to_metadata
+         Whether to apply the schema name to the metadata object.
+     ignore_constraints
+         Whether to ignore constraints when building the metadata.
+     table_name_postfix
+         A string to append to the table names when building the metadata.
+     """
+
+     def __init__(
+         self,
+         schema: Schema,
+         apply_schema_to_metadata: bool = True,
+         ignore_constraints: bool = False,
+         table_name_postfix: str = "",
+     ) -> None:
+         """Initialize the metadata builder."""
+         self.schema = schema
+         if not apply_schema_to_metadata:
+             logger.debug("Schema name will not be applied to metadata")
+         self.metadata = MetaData(schema=schema.name if apply_schema_to_metadata else None)
+         self._objects: dict[str, Any] = {}
+         self.ignore_constraints = ignore_constraints
+         self.table_name_postfix = table_name_postfix
+
+     def build(self) -> MetaData:
+         """Build the SQLAlchemy tables and constraints from the schema.
+
+         Notes
+         -----
+         This first builds the tables and then makes a second pass to build the
+         constraints. This is necessary because the constraints may reference
+         objects that are not yet created when the tables are built.
+
+         Returns
+         -------
+         `~sqlalchemy.sql.schema.MetaData`
+             The SQLAlchemy metadata object.
+         """
+         self.build_tables()
+         if not self.ignore_constraints:
+             self.build_constraints()
+         else:
+             logger.warning("Ignoring constraints")
+         return self.metadata
+
+     def build_tables(self) -> None:
+         """Build the SQLAlchemy tables from the schema."""
+         for table in self.schema.tables:
+             self.build_table(table)
+             if table.primary_key:
+                 primary_key = self.build_primary_key(table.primary_key)
+                 self._objects[table.id].append_constraint(primary_key)
+
+     def build_primary_key(self, primary_key_columns: str | list[str]) -> PrimaryKeyConstraint:
+         """Build a SQLAlchemy ``PrimaryKeyConstraint`` from a single column ID
+         or a list of them.
+
+         Parameters
+         ----------
+         primary_key_columns
+             The column ID or list of column IDs from which to build the
+             primary key.
+
+         Returns
+         -------
+         `~sqlalchemy.sql.schema.PrimaryKeyConstraint`
+             The SQLAlchemy primary key constraint object.
+
+         Notes
+         -----
+         ``primary_key_columns`` is a string or a list of strings representing
+         IDs, which are used to look up the column objects in the builder's
+         internal ID map.
+         """
+         return PrimaryKeyConstraint(
+             *[self._objects[column_id] for column_id in ensure_iterable(primary_key_columns)]
+         )
+
+     def build_table(self, table_obj: datamodel.Table) -> None:
+         """Build a SQLAlchemy ``Table`` from a Felis table and add it to the
+         metadata.
+
+         Parameters
+         ----------
+         table_obj
+             The Felis table object from which to build the SQLAlchemy table.
+
+         Notes
+         -----
+         Several MySQL table options, including the engine and charset, are
+         handled by adding annotations to the table. This is not needed for
+         Postgres, as Felis does not support any table options for this dialect.
+         """
+         # Process mysql table options.
+         optargs = {}
+         if table_obj.mysql_engine:
+             optargs["mysql_engine"] = table_obj.mysql_engine
+         if table_obj.mysql_charset:
+             optargs["mysql_charset"] = table_obj.mysql_charset
+
+         # Create the SQLAlchemy table object and its columns.
+         name = table_obj.name
+         id = table_obj.id
+         description = table_obj.description
+         columns = [self.build_column(column) for column in table_obj.columns]
+         table = Table(
+             name + self.table_name_postfix,
+             self.metadata,
+             *columns,
+             comment=description,
+             **optargs,  # type: ignore[arg-type]
+         )
+
+         # Create the indexes and add them to the table.
+         indexes = [self.build_index(index) for index in table_obj.indexes]
+         for index in indexes:
+             index._set_parent(table)
+             table.indexes.add(index)
+
+         self._objects[id] = table
+
+     def build_column(self, column_obj: datamodel.Column) -> Column:
+         """Build a SQLAlchemy ``Column`` from a Felis column object.
+
+         Parameters
+         ----------
+         column_obj
+             The column object from which to build the SQLAlchemy column.
+
+         Returns
+         -------
+         `~sqlalchemy.sql.schema.Column`
+             The SQLAlchemy column object.
+         """
+         # Get basic column attributes.
+         name = column_obj.name
+         id = column_obj.id
+         description = column_obj.description
+         value = column_obj.value
+         nullable = column_obj.nullable
+
+         # Get datatype, handling variant overrides such as "mysql:datatype".
+         datatype = get_datatype_with_variants(column_obj)
+
+         # Set autoincrement, depending on if it was provided explicitly.
+         autoincrement: Literal["auto"] | bool = (
+             column_obj.autoincrement if column_obj.autoincrement is not None else "auto"
+         )
+
+         server_default: str | TextClause | None = None
+         if value is not None:
+             server_default = str(value)
+             if server_default in _VALID_SERVER_DEFAULTS or not isinstance(value, str):
+                 # If the server default is a valid keyword or not a string,
+                 # use it as is.
+                 server_default = text(server_default)
+
+         if server_default is not None:
+             logger.debug(f"Column '{id}' has default value: {server_default}")
+
+         column: Column = Column(
+             name,
+             datatype,
+             comment=description,
+             autoincrement=autoincrement,
+             nullable=nullable,
+             server_default=server_default,
+         )
+
+         self._objects[id] = column
+
+         return column
+
+     def build_constraints(self) -> None:
+         """Build the SQLAlchemy constraints from the Felis schema and append
+         them to the associated table in the metadata.
+
+         Notes
+         -----
+         This is performed as a separate step after building the tables so that
+         all the referenced objects in the constraints will be present and can
+         be looked up by their ID.
+         """
+         for table_obj in self.schema.tables:
+             table = self._objects[table_obj.id]
+             for constraint_obj in table_obj.constraints:
+                 constraint = self.build_constraint(constraint_obj)
+                 table.append_constraint(constraint)
+
+     def build_constraint(self, constraint_obj: datamodel.Constraint) -> Constraint:
+         """Build a SQLAlchemy ``Constraint`` from a Felis constraint.
+
+         Parameters
+         ----------
+         constraint_obj
+             The Felis object from which to build the constraint.
+
+         Returns
+         -------
+         `~sqlalchemy.sql.schema.Constraint`
+             The SQLAlchemy constraint object.
+
+         Raises
+         ------
+         ValueError
+             If the constraint type is not recognized.
+         TypeError
+             If the constraint object is not the expected type.
+         """
+         args: dict[str, Any] = {
+             "name": constraint_obj.name or None,
+             "comment": constraint_obj.description or None,
+             "deferrable": constraint_obj.deferrable or None,
+             "initially": constraint_obj.initially or None,
+         }
+
+         constraint: Constraint
+
+         if isinstance(constraint_obj, datamodel.ForeignKeyConstraint):
+             fk_obj: datamodel.ForeignKeyConstraint = constraint_obj
+             columns = [self._objects[column_id] for column_id in fk_obj.columns]
+             refcolumns = [self._objects[column_id] for column_id in fk_obj.referenced_columns]
+             if constraint_obj.on_delete is not None:
+                 args["ondelete"] = constraint_obj.on_delete
+             if constraint_obj.on_update is not None:
+                 args["onupdate"] = constraint_obj.on_update
+             constraint = ForeignKeyConstraint(columns, refcolumns, **args)
+         elif isinstance(constraint_obj, datamodel.CheckConstraint):
+             check_obj: datamodel.CheckConstraint = constraint_obj
+             expression = check_obj.expression
+             constraint = CheckConstraint(expression, **args)
+         elif isinstance(constraint_obj, datamodel.UniqueConstraint):
+             uniq_obj: datamodel.UniqueConstraint = constraint_obj
+             columns = [self._objects[column_id] for column_id in uniq_obj.columns]
+             constraint = UniqueConstraint(*columns, **args)
+         else:
+             raise ValueError(f"Unknown constraint type: {type(constraint_obj)}")
+
+         self._objects[constraint_obj.id] = constraint
+
+         return constraint
+
+     def build_index(self, index_obj: datamodel.Index) -> Index:
+         """Build a SQLAlchemy ``Index`` from a Felis `~felis.datamodel.Index`.
+
+         Parameters
+         ----------
+         index_obj
+             The Felis object from which to build the SQLAlchemy index.
+
+         Returns
+         -------
+         `~sqlalchemy.sql.schema.Index`
+             The SQLAlchemy index object.
+         """
+         columns = [self._objects[c_id] for c_id in (index_obj.columns if index_obj.columns else [])]
+         expressions = index_obj.expressions if index_obj.expressions else []
+         index = Index(index_obj.name, *columns, *expressions)
+         self._objects[index_obj.id] = index
+         return index
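Usage sketch (not taken from the package): MetaDataBuilder turns a Felis schema into a SQLAlchemy MetaData object. The builder call below mirrors the one DatabaseDiff uses above; the schema-loading step, file name, and SQLite URL are assumptions, and create_all is standard SQLAlchemy rather than anything the package prescribes.

    import yaml
    from sqlalchemy import create_engine

    from felis.datamodel import Schema
    from felis.metadata import MetaDataBuilder

    # Assumed: this YAML file contains a valid Felis schema.
    with open("schema_v1.yaml") as f:
        schema = Schema.model_validate(yaml.safe_load(f))

    # apply_schema_to_metadata=False leaves table names unqualified, as in
    # DatabaseDiff above; omit it to namespace the tables under the schema name.
    metadata = MetaDataBuilder(schema, apply_schema_to_metadata=False).build()

    engine = create_engine("sqlite:///example.db")
    metadata.create_all(engine)  # standard SQLAlchemy: emit CREATE TABLE statements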