lsst-felis 27.2024.3000__tar.gz → 27.2024.3200__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {lsst_felis-27.2024.3000/python/lsst_felis.egg-info → lsst_felis-27.2024.3200}/PKG-INFO +1 -1
- {lsst_felis-27.2024.3000 → lsst_felis-27.2024.3200}/README.rst +9 -34
- {lsst_felis-27.2024.3000 → lsst_felis-27.2024.3200}/python/felis/cli.py +65 -57
- {lsst_felis-27.2024.3000 → lsst_felis-27.2024.3200}/python/felis/datamodel.py +11 -11
- {lsst_felis-27.2024.3000 → lsst_felis-27.2024.3200}/python/felis/db/dialects.py +1 -1
- {lsst_felis-27.2024.3000 → lsst_felis-27.2024.3200}/python/felis/db/sqltypes.py +1 -1
- {lsst_felis-27.2024.3000 → lsst_felis-27.2024.3200}/python/felis/db/utils.py +35 -10
- {lsst_felis-27.2024.3000 → lsst_felis-27.2024.3200}/python/felis/db/variants.py +2 -2
- {lsst_felis-27.2024.3000 → lsst_felis-27.2024.3200}/python/felis/metadata.py +14 -4
- lsst_felis-27.2024.3200/python/felis/tests/__init__.py +0 -0
- lsst_felis-27.2024.3200/python/felis/tests/postgresql.py +134 -0
- lsst_felis-27.2024.3200/python/felis/version.py +2 -0
- {lsst_felis-27.2024.3000 → lsst_felis-27.2024.3200/python/lsst_felis.egg-info}/PKG-INFO +1 -1
- {lsst_felis-27.2024.3000 → lsst_felis-27.2024.3200}/python/lsst_felis.egg-info/SOURCES.txt +3 -0
- {lsst_felis-27.2024.3000 → lsst_felis-27.2024.3200}/tests/test_cli.py +29 -0
- {lsst_felis-27.2024.3000 → lsst_felis-27.2024.3200}/tests/test_metadata.py +35 -7
- lsst_felis-27.2024.3200/tests/test_postgresql.py +89 -0
- lsst_felis-27.2024.3000/python/felis/version.py +0 -2
- {lsst_felis-27.2024.3000 → lsst_felis-27.2024.3200}/COPYRIGHT +0 -0
- {lsst_felis-27.2024.3000 → lsst_felis-27.2024.3200}/LICENSE +0 -0
- {lsst_felis-27.2024.3000 → lsst_felis-27.2024.3200}/pyproject.toml +0 -0
- {lsst_felis-27.2024.3000 → lsst_felis-27.2024.3200}/python/felis/__init__.py +0 -0
- {lsst_felis-27.2024.3000 → lsst_felis-27.2024.3200}/python/felis/db/__init__.py +0 -0
- {lsst_felis-27.2024.3000 → lsst_felis-27.2024.3200}/python/felis/py.typed +0 -0
- {lsst_felis-27.2024.3000 → lsst_felis-27.2024.3200}/python/felis/tap.py +0 -0
- {lsst_felis-27.2024.3000 → lsst_felis-27.2024.3200}/python/felis/types.py +0 -0
- {lsst_felis-27.2024.3000 → lsst_felis-27.2024.3200}/python/lsst_felis.egg-info/dependency_links.txt +0 -0
- {lsst_felis-27.2024.3000 → lsst_felis-27.2024.3200}/python/lsst_felis.egg-info/entry_points.txt +0 -0
- {lsst_felis-27.2024.3000 → lsst_felis-27.2024.3200}/python/lsst_felis.egg-info/requires.txt +0 -0
- {lsst_felis-27.2024.3000 → lsst_felis-27.2024.3200}/python/lsst_felis.egg-info/top_level.txt +0 -0
- {lsst_felis-27.2024.3000 → lsst_felis-27.2024.3200}/python/lsst_felis.egg-info/zip-safe +0 -0
- {lsst_felis-27.2024.3000 → lsst_felis-27.2024.3200}/setup.cfg +0 -0
- {lsst_felis-27.2024.3000 → lsst_felis-27.2024.3200}/tests/test_datamodel.py +0 -0
- {lsst_felis-27.2024.3000 → lsst_felis-27.2024.3200}/tests/test_tap.py +0 -0
{lsst_felis-27.2024.3000/python/lsst_felis.egg-info → lsst_felis-27.2024.3200}/PKG-INFO RENAMED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: lsst-felis
-Version: 27.2024.3000
+Version: 27.2024.3200
 Summary: A vocabulary for describing catalogs and acting on those descriptions
 Author-email: Rubin Observatory Data Management <dm-admin@lists.lsst.org>
 License: GNU General Public License v3 or later (GPLv3+)
{lsst_felis-27.2024.3000 → lsst_felis-27.2024.3200}/README.rst RENAMED

@@ -1,7 +1,7 @@
 Felis
 =====

-|PyPI| |Python|
+|Tag| |PyPI| |Python| |Codecov|

 .. |PyPI| image:: https://img.shields.io/pypi/v/lsst-felis
     :target: https://pypi.org/project/lsst-felis
@@ -11,6 +11,14 @@ Felis
     :target: https://pypi.org/project/lsst-felis
     :alt: PyPI - Python Version

+.. |Codecov| image:: https://codecov.io/gh/lsst/felis/branch/main/graph/badge.svg
+    :target: https://codecov.io/gh/lsst/felis
+    :alt: Codecov
+
+.. |Tag| image:: https://img.shields.io/github/v/tag/lsst/felis
+    :target: https://github.com/lsst/felis/tags
+    :alt: Latest Tag
+
 YAML Schema Definition Language for Databases

 Overview
@@ -39,45 +47,12 @@ that it can be used as a general tool to define, update, and manage database
 schemas in a way that is independent of database variant or implementation
 language such as SQL.

-Installation and Usage
-----------------------
-
-Felis is designed to work with Python 3.11 and 3.12 and may be installed using
-`pip <https://pypi.org/project/pip/>`_::
-
-    pip install lsst-felis
-
-The `felis` command-line tool that is installed with the package can be used to
-perform various actions on the YAML schema files, including validating the
-schema definitions, generating DDL statements for various databases, or
-updating a TAP service with schema metadata. The command line help provides
-documentation on all of these utilities::
-
-    felis --help
-
-Individual subcommands also have their own documentation::
-
-    felis validate --help
-
-For instance, this command can be used to validate a schema file::
-
-    felis validate myschema.yaml
-
-If the schema generates validation errors, then these will be printed to the
-terminal. These errors may include missing required attributes, misspelled YAML
-keys, invalid data values, etc.
-
 Documentation
 -------------

 Detailed information on usage, customization, and design is available at the
 `Felis documentation site <https://felis.lsst.io>`_.

-Presentations
--------------
-
-- `IVOA Inter Op 2018 <https://wiki.ivoa.net/internal/IVOA/InterOpNov2018Apps/Felis_ivoa-11_2018.pdf>`_ - "Felis: A YAML Schema Definition Language for Database Schemas" - `slides <https://wiki.ivoa.net/internal/IVOA/InterOpNov2018Apps/Felis_ivoa-11_2018.pdf>`__
-
 Support
 -------

{lsst_felis-27.2024.3000 → lsst_felis-27.2024.3200}/python/felis/cli.py RENAMED

@@ -71,26 +71,32 @@ def cli(log_level: str, log_file: str | None) -> None:


 @cli.command("create", help="Create database objects from the Felis file")
-@click.option("--engine-url", envvar="
+@click.option("--engine-url", envvar="FELIS_ENGINE_URL", help="SQLAlchemy Engine URL", default="sqlite://")
 @click.option("--schema-name", help="Alternate schema name to override Felis file")
 @click.option(
-    "--
+    "--initialize",
+    is_flag=True,
+    help="Create the schema in the database if it does not exist (error if already exists)",
+)
+@click.option(
+    "--drop", is_flag=True, help="Drop schema if it already exists in the database (implies --initialize)"
 )
-@click.option("--drop-if-exists", is_flag=True, help="Drop schema if it already exists in the database")
 @click.option("--echo", is_flag=True, help="Echo database commands as they are executed")
 @click.option("--dry-run", is_flag=True, help="Dry run only to print out commands instead of executing")
 @click.option(
     "--output-file", "-o", type=click.File(mode="w"), help="Write SQL commands to a file instead of executing"
 )
+@click.option("--ignore-constraints", is_flag=True, help="Ignore constraints when creating tables")
 @click.argument("file", type=click.File())
 def create(
     engine_url: str,
     schema_name: str | None,
-
-
+    initialize: bool,
+    drop: bool,
     echo: bool,
     dry_run: bool,
     output_file: IO[str] | None,
+    ignore_constraints: bool,
     file: IO,
 ) -> None:
     """Create database objects from the Felis file.
@@ -101,9 +107,9 @@ def create(
         SQLAlchemy Engine URL.
     schema_name
         Alternate schema name to override Felis file.
-
+    initialize
         Create the schema in the database if it does not exist.
-
+    drop
         Drop schema if it already exists in the database.
     echo
         Echo database commands as they are executed.
@@ -111,54 +117,56 @@ def create(
         Dry run only to print out commands instead of executing.
     output_file
         Write SQL commands to a file instead of executing.
+    ignore_constraints
+        Ignore constraints when creating tables.
     file
         Felis file to read.
-
-    Notes
-    -----
-    This command creates database objects from the Felis file. The
-    ``--create-if-not-exists`` or ``--drop-if-exists`` flags can be used to
-    create a new MySQL database or PostgreSQL schema if it does not exist
-    already.
     """
-    [previous 38-line implementation of the command body; the old content is not captured in the source diff view]
+    try:
+        yaml_data = yaml.safe_load(file)
+        schema = Schema.model_validate(yaml_data)
+        url = make_url(engine_url)
+        if schema_name:
+            logger.info(f"Overriding schema name with: {schema_name}")
+            schema.name = schema_name
+        elif url.drivername == "sqlite":
+            logger.info("Overriding schema name for sqlite with: main")
+            schema.name = "main"
+        if not url.host and not url.drivername == "sqlite":
+            dry_run = True
+            logger.info("Forcing dry run for non-sqlite engine URL with no host")
+
+        metadata = MetaDataBuilder(schema, ignore_constraints=ignore_constraints).build()
+        logger.debug(f"Created metadata with schema name: {metadata.schema}")
+
+        engine: Engine | MockConnection
+        if not dry_run and not output_file:
+            engine = create_engine(url, echo=echo)
+        else:
+            if dry_run:
+                logger.info("Dry run will be executed")
+            engine = DatabaseContext.create_mock_engine(url, output_file)
+            if output_file:
+                logger.info("Writing SQL output to: " + output_file.name)
+
+        context = DatabaseContext(metadata, engine)
+
+        if drop and initialize:
+            raise ValueError("Cannot drop and initialize schema at the same time")
+
+        if drop:
+            logger.debug("Dropping schema if it exists")
+            context.drop()
+            initialize = True  # If schema is dropped, it needs to be recreated.
+
+        if initialize:
+            logger.debug("Creating schema if not exists")
+            context.initialize()
+
+        context.create_all()
+    except Exception as e:
+        logger.exception(e)
+        raise click.ClickException(str(e))


 @cli.command("init-tap", help="Initialize TAP_SCHEMA objects in the database")
@@ -204,7 +212,7 @@ def init_tap(
     tables are created in the database schema specified by the engine URL,
     which must be a PostgreSQL schema or MySQL database that already exists.
     """
-    engine = create_engine(engine_url
+    engine = create_engine(engine_url)
     init_tables(
         tap_schema_name,
         tap_schemas_table,
@@ -217,7 +225,7 @@ def init_tap(


 @cli.command("load-tap", help="Load metadata from a Felis file into a TAP_SCHEMA database")
-@click.option("--engine-url", envvar="
+@click.option("--engine-url", envvar="FELIS_ENGINE_URL", help="SQLAlchemy Engine URL")
 @click.option("--schema-name", help="Alternate Schema Name for Felis file")
 @click.option("--catalog-name", help="Catalog Name for Schema")
 @click.option("--dry-run", is_flag=True, help="Dry Run Only. Prints out the DDL that would be executed")
@@ -372,9 +380,9 @@ def validate(
     Raises
     ------
     click.exceptions.Exit
-
-        thrown when a schema fails to validate will be logged as an
-        message.
+        Raised if any validation errors are found. The ``ValidationError``
+        which is thrown when a schema fails to validate will be logged as an
+        error message.

     Notes
     -----
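For orientation, here is a minimal, hedged sketch of driving the reworked ``create`` command with the new flags, using click's test runner the same way the package's own tests (shown further below) do. It assumes the click group is importable as ``cli`` from ``felis.cli``, and ``schema.yaml`` is a placeholder for a real Felis schema file.

    # Hedged sketch, not part of the package: exercise the new --initialize
    # and --ignore-constraints flags against an in-memory SQLite engine.
    from click.testing import CliRunner

    from felis.cli import cli  # assumption: the click group is exported as `cli`

    runner = CliRunner()
    result = runner.invoke(
        cli,
        [
            "create",
            "--engine-url=sqlite://",   # the new default shown in this diff
            "--initialize",             # create the schema if it does not exist
            "--ignore-constraints",     # new flag: skip constraint creation
            "schema.yaml",              # placeholder path to a Felis schema file
        ],
        catch_exceptions=False,
    )
    assert result.exit_code == 0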
{lsst_felis-27.2024.3000 → lsst_felis-27.2024.3200}/python/felis/datamodel.py RENAMED

@@ -253,7 +253,7 @@ class Column(BaseObject):
         Raises
         ------
         ValueError
-            If both FITS and IVOA units are provided, or if the unit is
+            Raised If both FITS and IVOA units are provided, or if the unit is
             invalid.
         """
         fits_unit = self.fits_tunit
@@ -289,7 +289,7 @@ class Column(BaseObject):
         Raises
         ------
         ValueError
-
+            Raised if a length is not provided for a sized type.
         """
         datatype = values.get("datatype")
         if datatype is None:
@@ -326,7 +326,7 @@ class Column(BaseObject):
         Raises
         ------
         ValueError
-
+            Raised if a datatype override is redundant.
         """
         context = info.context
         if not context or not context.get("check_redundant_datatypes", False):
@@ -445,8 +445,8 @@ class Index(BaseObject):
         Raises
         ------
         ValueError
-
-            specified.
+            Raised if both columns and expressions are specified, or if neither
+            are specified.
         """
         if "columns" in values and "expressions" in values:
             raise ValueError("Defining columns and expressions is not valid")
@@ -547,7 +547,7 @@ class Table(BaseObject):
         Raises
         ------
         ValueError
-
+            Raised if column names are not unique.
         """
         if len(columns) != len(set(column.name for column in columns)):
             raise ValueError("Column names must be unique")
@@ -570,7 +570,7 @@ class Table(BaseObject):
         Raises
         ------
         ValueError
-            If the table is missing a TAP table index.
+            Raised If the table is missing a TAP table index.
         """
         context = info.context
         if not context or not context.get("check_tap_table_indexes", False):
@@ -597,7 +597,7 @@ class Table(BaseObject):
         Raises
         ------
         ValueError
-
+            Raised if the table is missing a column flagged as 'principal'.
         """
         context = info.context
         if not context or not context.get("check_tap_principal", False):
@@ -741,7 +741,7 @@ class Schema(BaseObject):
         Raises
         ------
         ValueError
-
+            Raised if table names are not unique.
         """
         if len(tables) != len(set(table.name for table in tables)):
             raise ValueError("Table names must be unique")
@@ -779,7 +779,7 @@ class Schema(BaseObject):
         Raises
         ------
         ValueError
-
+            Raised if duplicate identifiers are found in the schema.

         Notes
         -----
@@ -826,7 +826,7 @@ class Schema(BaseObject):
         Raises
         ------
         KeyError
-
+            Raised if the object with the given ID is not found in the schema.
         """
         if id not in self:
             raise KeyError(f"Object with ID '{id}' not found in schema")
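The datamodel changes above are docstring clarifications of which validators raise and when. The optional checks named in these hunks (``check_redundant_datatypes``, ``check_tap_table_indexes``, ``check_tap_principal``) are read from Pydantic's validation context; a hedged sketch of enabling them when validating a schema file (``schema.yaml`` is a placeholder path):

    # Hedged sketch: turn on the optional schema checks referenced by the
    # validators above via the Pydantic validation context.
    import yaml

    from felis.datamodel import Schema

    with open("schema.yaml") as f:  # placeholder path
        data = yaml.safe_load(f)

    schema = Schema.model_validate(
        data,
        context={
            "check_redundant_datatypes": True,
            "check_tap_table_indexes": True,
            "check_tap_principal": True,
        },
    )
    print(schema.name)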
{lsst_felis-27.2024.3000 → lsst_felis-27.2024.3200}/python/felis/db/dialects.py RENAMED

@@ -109,7 +109,7 @@ def get_dialect_module(dialect_name: str) -> ModuleType:
     Raises
     ------
     ValueError
-
+        Raised if the dialect name is not supported.
     """
     if dialect_name not in _DIALECT_MODULES:
         raise ValueError(f"Unsupported dialect: {dialect_name}")
{lsst_felis-27.2024.3000 → lsst_felis-27.2024.3200}/python/felis/db/sqltypes.py RENAMED

@@ -70,7 +70,7 @@ def string_to_typeengine(
     Raises
     ------
     ValueError
-
+        Raised if the type string is invalid or the type is not supported.

     Notes
     -----
{lsst_felis-27.2024.3000 → lsst_felis-27.2024.3200}/python/felis/db/utils.py RENAMED

@@ -220,15 +220,15 @@ class DatabaseContext:
         self.metadata = metadata
         self.conn = ConnectionWrapper(engine)

-    def
+    def initialize(self) -> None:
         """Create the schema in the database if it does not exist.

         Raises
         ------
         ValueError
-
+            Raised if the database is not supported or it already exists.
         sqlalchemy.exc.SQLAlchemyError
-
+            Raised if there is an error creating the schema.

         Notes
         -----
@@ -239,24 +239,45 @@ class DatabaseContext:
         schema_name = self.metadata.schema
         try:
             if self.dialect_name == "mysql":
+                logger.debug(f"Checking if MySQL database exists: {schema_name}")
+                result = self.conn.execute(text(f"SHOW DATABASES LIKE '{schema_name}'"))
+                if result.fetchone():
+                    raise ValueError(f"MySQL database '{schema_name}' already exists.")
                 logger.debug(f"Creating MySQL database: {schema_name}")
-                self.conn.execute(text(f"CREATE DATABASE
+                self.conn.execute(text(f"CREATE DATABASE {schema_name}"))
             elif self.dialect_name == "postgresql":
+                logger.debug(f"Checking if PG schema exists: {schema_name}")
+                result = self.conn.execute(
+                    text(
+                        f"""
+                        SELECT schema_name
+                        FROM information_schema.schemata
+                        WHERE schema_name = '{schema_name}'
+                        """
+                    )
+                )
+                if result.fetchone():
+                    raise ValueError(f"PostgreSQL schema '{schema_name}' already exists.")
                 logger.debug(f"Creating PG schema: {schema_name}")
-                self.conn.execute(CreateSchema(schema_name
+                self.conn.execute(CreateSchema(schema_name))
+            elif self.dialect_name == "sqlite":
+                # Just silently ignore this operation for SQLite. The database
+                # will still be created if it does not exist and the engine
+                # URL is valid.
+                pass
             else:
-                raise ValueError("
+                raise ValueError(f"Initialization not supported for: {self.dialect_name}")
         except SQLAlchemyError as e:
             logger.error(f"Error creating schema: {e}")
             raise

-    def
+    def drop(self) -> None:
         """Drop the schema in the database if it exists.

         Raises
         ------
         ValueError
-
+            Raised if the database is not supported.

         Notes
         -----
@@ -271,8 +292,12 @@ class DatabaseContext:
             elif self.dialect_name == "postgresql":
                 logger.debug(f"Dropping PostgreSQL schema if exists: {schema_name}")
                 self.conn.execute(DropSchema(schema_name, if_exists=True, cascade=True))
+            elif self.dialect_name == "sqlite":
+                if isinstance(self.engine, Engine):
+                    logger.debug("Dropping tables in SQLite schema")
+                    self.metadata.drop_all(bind=self.engine)
             else:
-                raise ValueError(f"
+                raise ValueError(f"Drop operation not supported for: {self.dialect_name}")
         except SQLAlchemyError as e:
             logger.error(f"Error dropping schema: {e}")
             raise
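Taken together, the new ``initialize``/``drop`` names give ``DatabaseContext`` a small lifecycle API. A hedged sketch of how the methods touched by this diff fit together (the connection URL and ``schema.yaml`` path are placeholders):

    # Hedged sketch of the DatabaseContext lifecycle shown in this diff.
    import yaml
    from sqlalchemy import create_engine

    from felis.datamodel import Schema
    from felis.db.utils import DatabaseContext
    from felis.metadata import MetaDataBuilder

    with open("schema.yaml") as f:  # placeholder path
        schema = Schema.model_validate(yaml.safe_load(f))
    metadata = MetaDataBuilder(schema).build()

    engine = create_engine("postgresql+psycopg2://user:pass@localhost/mydb")  # placeholder URL
    ctx = DatabaseContext(metadata, engine)

    ctx.initialize()   # creates the schema; raises ValueError if it already exists
    ctx.create_all()   # emits the DDL for everything in the metadata
    ctx.drop()         # drops the schema again (CASCADE on PostgreSQL)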
{lsst_felis-27.2024.3000 → lsst_felis-27.2024.3200}/python/felis/db/variants.py RENAMED

@@ -82,7 +82,7 @@ def _get_column_variant_override(field_name: str) -> str:
     Raises
     ------
     ValueError
-
+        Raised if the field name is not found in the column variant overrides.
     """
     if field_name not in _COLUMN_VARIANT_OVERRIDES:
         raise ValueError(f"Field name {field_name} not found in column variant overrides")
@@ -111,7 +111,7 @@ def _process_variant_override(dialect_name: str, variant_override_str: str) -> t
     Raises
     ------
     ValueError
-
+        Raised if the type is not found in the dialect.

     Notes
     -----
{lsst_felis-27.2024.3000 → lsst_felis-27.2024.3200}/python/felis/metadata.py RENAMED

@@ -94,8 +94,8 @@ def get_datatype_with_variants(column_obj: datamodel.Column) -> TypeEngine:
     Raises
     ------
     ValueError
-
-        invalid.
+        Raised if the column has a sized type but no length or if the datatype
+        is invalid.
     """
     variant_dict = make_variant_dict(column_obj)
     felis_type = FelisType.felis_type(column_obj.datatype.value)
@@ -127,10 +127,16 @@ class MetaDataBuilder:
         Whether to apply the schema name to the metadata object.
     apply_schema_to_tables
         Whether to apply the schema name to the tables.
+    ignore_constraints
+        Whether to ignore constraints when building the metadata.
     """

     def __init__(
-        self,
+        self,
+        schema: Schema,
+        apply_schema_to_metadata: bool = True,
+        apply_schema_to_tables: bool = True,
+        ignore_constraints: bool = False,
     ) -> None:
         """Initialize the metadata builder."""
         self.schema = schema
@@ -141,6 +147,7 @@ class MetaDataBuilder:
         self.metadata = MetaData(schema=schema.name if apply_schema_to_metadata else None)
         self._objects: dict[str, Any] = {}
         self.apply_schema_to_tables = apply_schema_to_tables
+        self.ignore_constraints = ignore_constraints

     def build(self) -> MetaData:
         """Build the SQLAlchemy tables and constraints from the schema.
@@ -157,7 +164,10 @@ class MetaDataBuilder:
             The SQLAlchemy metadata object.
         """
         self.build_tables()
-        self.build_constraints()
+        if not self.ignore_constraints:
+            self.build_constraints()
+        else:
+            logger.warning("Ignoring constraints")
         return self.metadata

     def build_tables(self) -> None:
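The ``ignore_constraints`` option can also be exercised directly on the builder; this hedged sketch mirrors the new unit test added to ``tests/test_metadata.py`` further below (``schema.yaml`` is again a placeholder path):

    # Hedged sketch: build metadata without constraints and verify that only
    # primary keys remain.
    import yaml
    from sqlalchemy import PrimaryKeyConstraint

    from felis.datamodel import Schema
    from felis.metadata import MetaDataBuilder

    with open("schema.yaml") as f:  # placeholder path
        schema = Schema.model_validate(yaml.safe_load(f))

    md = MetaDataBuilder(schema, ignore_constraints=True).build()
    for table in md.tables.values():
        extra = [c for c in table.constraints if not isinstance(c, PrimaryKeyConstraint)]
        assert not extra, f"{table.name} unexpectedly has non-primary-key constraints"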
lsst_felis-27.2024.3200/python/felis/tests/__init__.py ADDED

File without changes
lsst_felis-27.2024.3200/python/felis/tests/postgresql.py ADDED

@@ -0,0 +1,134 @@
+"""Provides a temporary Postgresql instance for testing."""
+
+# This file is part of felis.
+#
+# Developed for the LSST Data Management System.
+# This product includes software developed by the LSST Project
+# (https://www.lsst.org).
+# See the COPYRIGHT file at the top-level directory of this distribution
+# for details of code ownership.
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+
+import gc
+import unittest
+from collections.abc import Iterator
+from contextlib import contextmanager
+
+from sqlalchemy import text
+from sqlalchemy.engine import Connection, Engine, create_engine
+
+try:
+    from testing.postgresql import Postgresql  # type: ignore
+except ImportError:
+    Postgresql = None
+
+__all__ = ["TemporaryPostgresInstance", "setup_postgres_test_db"]
+
+
+class TemporaryPostgresInstance:
+    """Wrapper for a temporary Postgres database.
+
+    Parameters
+    ----------
+    server
+        The ``testing.postgresql.Postgresql`` instance.
+    engine
+        The SQLAlchemy engine for the temporary database server.
+
+    Notes
+    -----
+    This class was copied and modified from
+    ``lsst.daf.butler.tests.postgresql``.
+    """
+
+    def __init__(self, server: Postgresql, engine: Engine) -> None:
+        """Initialize the temporary Postgres database instance."""
+        self._server = server
+        self._engine = engine
+
+    @property
+    def url(self) -> str:
+        """Return connection URL for the temporary database server.
+
+        Returns
+        -------
+        str
+            The connection URL.
+        """
+        return self._server.url()
+
+    @property
+    def engine(self) -> Engine:
+        """Return the SQLAlchemy engine for the temporary database server.
+
+        Returns
+        -------
+        `~sqlalchemy.engine.Engine`
+            The SQLAlchemy engine.
+        """
+        return self._engine
+
+    @contextmanager
+    def begin(self) -> Iterator[Connection]:
+        """Return a SQLAlchemy connection to the test database.
+
+        Returns
+        -------
+        `~sqlalchemy.engine.Connection`
+            The SQLAlchemy connection.
+        """
+        with self._engine.begin() as connection:
+            yield connection
+
+    def print_info(self) -> None:
+        """Print information about the temporary database server."""
+        print("\n\n---- PostgreSQL URL ----")
+        print(self.url)
+        self._engine = create_engine(self.url)
+        with self.begin() as conn:
+            print("\n---- PostgreSQL Version ----")
+            res = conn.execute(text("SELECT version()")).fetchone()
+            if res:
+                print(res[0])
+        print("\n")
+
+
+@contextmanager
+def setup_postgres_test_db() -> Iterator[TemporaryPostgresInstance]:
+    """Set up a temporary Postgres database instance that can be used for
+    testing.
+
+    Returns
+    -------
+    TemporaryPostgresInstance
+        The temporary Postgres database instance.
+
+    Raises
+    ------
+    unittest.SkipTest
+        Raised if the ``testing.postgresql`` module is not available.
+    """
+    if Postgresql is None:
+        raise unittest.SkipTest("testing.postgresql module not available.")
+
+    with Postgresql() as server:
+        engine = create_engine(server.url())
+        instance = TemporaryPostgresInstance(server, engine)
+        yield instance
+
+        # Clean up any lingering SQLAlchemy engines/connections
+        # so they're closed before we shut down the server.
+        gc.collect()
+        engine.dispose()
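For reference, a hedged sketch of how this helper is meant to be used from a test case, mirroring ``tests/test_postgresql.py`` later in this diff; note that ``enterClassContext`` requires Python 3.11 or newer, which matches the versions the package targets.

    # Hedged sketch: spin up the throwaway PostgreSQL server for a test class.
    import unittest

    from sqlalchemy import text

    from felis.tests.postgresql import setup_postgres_test_db


    class ExampleTest(unittest.TestCase):
        @classmethod
        def setUpClass(cls) -> None:
            # Starts the temporary server (the tests are skipped if
            # testing.postgresql is not installed) and tears it down when
            # the class finishes.
            cls.postgresql = cls.enterClassContext(setup_postgres_test_db())
            super().setUpClass()

        def test_connect(self) -> None:
            with self.postgresql.begin() as conn:
                self.assertIsNotNone(conn.execute(text("SELECT 1")).fetchone())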
{lsst_felis-27.2024.3000 → lsst_felis-27.2024.3200/python/lsst_felis.egg-info}/PKG-INFO RENAMED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: lsst-felis
-Version: 27.2024.3000
+Version: 27.2024.3200
 Summary: A vocabulary for describing catalogs and acting on those descriptions
 Author-email: Rubin Observatory Data Management <dm-admin@lists.lsst.org>
 License: GNU General Public License v3 or later (GPLv3+)
{lsst_felis-27.2024.3000 → lsst_felis-27.2024.3200}/python/lsst_felis.egg-info/SOURCES.txt RENAMED

@@ -16,6 +16,8 @@ python/felis/db/dialects.py
 python/felis/db/sqltypes.py
 python/felis/db/utils.py
 python/felis/db/variants.py
+python/felis/tests/__init__.py
+python/felis/tests/postgresql.py
 python/lsst_felis.egg-info/PKG-INFO
 python/lsst_felis.egg-info/SOURCES.txt
 python/lsst_felis.egg-info/dependency_links.txt
@@ -26,4 +28,5 @@ python/lsst_felis.egg-info/zip-safe
 tests/test_cli.py
 tests/test_datamodel.py
 tests/test_metadata.py
+tests/test_postgresql.py
 tests/test_tap.py
{lsst_felis-27.2024.3000 → lsst_felis-27.2024.3200}/tests/test_cli.py RENAMED

@@ -69,6 +69,25 @@ class CliTestCase(unittest.TestCase):
         )
         self.assertEqual(result.exit_code, 0)

+    def test_ignore_constraints(self) -> None:
+        """Test ``--ignore-constraints`` flag of ``create`` command."""
+        url = f"sqlite:///{self.tmpdir}/tap.sqlite3"
+
+        runner = CliRunner()
+        result = runner.invoke(
+            cli,
+            [
+                "create",
+                "--schema-name=main",
+                "--ignore-constraints",
+                f"--engine-url={url}",
+                "--dry-run",
+                TEST_YAML,
+            ],
+            catch_exceptions=False,
+        )
+        self.assertEqual(result.exit_code, 0)
+
     def test_init_tap(self) -> None:
         """Test for ``init-tap`` command."""
         url = f"sqlite:///{self.tmpdir}/tap.sqlite3"
@@ -123,6 +142,16 @@ class CliTestCase(unittest.TestCase):
         )
         self.assertEqual(result.exit_code, 0)

+    def test_initialize_and_drop(self) -> None:
+        """Test that initialize and drop can't be used together."""
+        runner = CliRunner()
+        result = runner.invoke(
+            cli,
+            ["create", "--initialize", "--drop", TEST_YAML],
+            catch_exceptions=False,
+        )
+        self.assertTrue(result.exit_code != 0)
+

 if __name__ == "__main__":
     unittest.main()
{lsst_felis-27.2024.3000 → lsst_felis-27.2024.3200}/tests/test_metadata.py RENAMED

@@ -25,6 +25,7 @@ import unittest
 import yaml
 from sqlalchemy import (
     CheckConstraint,
+    Connection,
     Constraint,
     ForeignKeyConstraint,
     Index,
@@ -52,11 +53,11 @@ class MetaDataTestCase(unittest.TestCase):
         with open(TEST_YAML) as data:
             self.yaml_data = yaml.safe_load(data)

-    def connection(self):
+    def connection(self) -> Connection:
         """Return a connection to the database."""
         return self.engine.connect()

-    def test_create_all(self):
+    def test_create_all(self) -> None:
         """Create all tables in the schema using the metadata object and a
         SQLite connection.

@@ -113,16 +114,25 @@ class MetaDataTestCase(unittest.TestCase):
                 self.assertEqual(md_constraint.name, md_db_constraint.name)
                 self.assertEqual(md_constraint.deferrable, md_db_constraint.deferrable)
                 self.assertEqual(md_constraint.initially, md_db_constraint.initially)
-
+                self.assertEqual(
+                    type(md_constraint), type(md_db_constraint), "Constraint types do not match"
+                )
+                if isinstance(md_constraint, ForeignKeyConstraint) and isinstance(
+                    md_db_constraint, ForeignKeyConstraint
+                ):
                     md_fk: ForeignKeyConstraint = md_constraint
                     md_db_fk: ForeignKeyConstraint = md_db_constraint
                     self.assertEqual(md_fk.referred_table.name, md_db_fk.referred_table.name)
                     self.assertEqual(md_fk.column_keys, md_db_fk.column_keys)
-                elif isinstance(md_constraint, UniqueConstraint)
+                elif isinstance(md_constraint, UniqueConstraint) and isinstance(
+                    md_db_constraint, UniqueConstraint
+                ):
                     md_uniq: UniqueConstraint = md_constraint
                     md_db_uniq: UniqueConstraint = md_db_constraint
                     self.assertEqual(md_uniq.columns.keys(), md_db_uniq.columns.keys())
-                elif isinstance(md_constraint, CheckConstraint)
+                elif isinstance(md_constraint, CheckConstraint) and isinstance(
+                    md_db_constraint, CheckConstraint
+                ):
                     md_check: CheckConstraint = md_constraint
                     md_db_check: CheckConstraint = md_db_constraint
                     self.assertEqual(str(md_check.sqltext), str(md_db_check.sqltext))
@@ -139,7 +149,7 @@ class MetaDataTestCase(unittest.TestCase):
             self.assertEqual(md_index.name, md_db_index.name)
             self.assertEqual(md_index.columns.keys(), md_db_index.columns.keys())

-    def test_builder(self):
+    def test_builder(self) -> None:
         """Test that the information in the metadata object created by the
         builder matches the data in the Felis schema used to create it.
         """
@@ -188,7 +198,7 @@ class MetaDataTestCase(unittest.TestCase):
         for primary_key in primary_keys:
             self.assertTrue(md_table.columns[primary_key].primary_key)

-    def test_timestamp(self):
+    def test_timestamp(self) -> None:
         """Test that the `timestamp` datatype is created correctly."""
         for precision in [None, 6]:
             col = dm.Column(
@@ -210,6 +220,24 @@ class MetaDataTestCase(unittest.TestCase):
             self.assertEqual(mysql_timestamp.timezone, False)
             self.assertEqual(mysql_timestamp.fsp, precision)

+    def test_ignore_constraints(self) -> None:
+        """Test that constraints are not created when the
+        ``ignore_constraints`` flag is set on the metadata builder.
+        """
+        schema = Schema.model_validate(self.yaml_data)
+        schema.name = "main"
+        builder = MetaDataBuilder(schema, ignore_constraints=True)
+        md = builder.build()
+        for table in md.tables.values():
+            non_primary_key_constraints = [
+                c for c in table.constraints if not isinstance(c, PrimaryKeyConstraint)
+            ]
+            self.assertEqual(
+                len(non_primary_key_constraints),
+                0,
+                msg=f"Table {table.name} has non-primary key constraints defined",
+            )
+

 if __name__ == "__main__":
     unittest.main()
lsst_felis-27.2024.3200/tests/test_postgresql.py ADDED

@@ -0,0 +1,89 @@
+# This file is part of felis.
+#
+# Developed for the LSST Data Management System.
+# This product includes software developed by the LSST Project
+# (https://www.lsst.org).
+# See the COPYRIGHT file at the top-level directory of this distribution
+# for details of code ownership.
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+
+import os
+import unittest
+
+import yaml
+from sqlalchemy import text
+
+from felis.datamodel import Schema
+from felis.db.utils import DatabaseContext
+from felis.metadata import MetaDataBuilder
+from felis.tests.postgresql import TemporaryPostgresInstance, setup_postgres_test_db  # type: ignore
+
+TESTDIR = os.path.abspath(os.path.dirname(__file__))
+TEST_YAML = os.path.join(TESTDIR, "data", "sales.yaml")
+
+
+class TestPostgresql(unittest.TestCase):
+    """Test PostgreSQL database setup."""
+
+    postgresql: TemporaryPostgresInstance
+
+    @classmethod
+    def setUpClass(cls) -> None:
+        # Create the postgres test server.
+        cls.postgresql = cls.enterClassContext(setup_postgres_test_db())
+        super().setUpClass()
+
+    def test_initialize_create_and_drop(self) -> None:
+        """Test database initialization, creation, and deletion in
+        PostgreSQL.
+        """
+        # Create the schema and metadata
+        yaml_data = yaml.safe_load(open(TEST_YAML))
+        schema = Schema.model_validate(yaml_data)
+        md = MetaDataBuilder(schema).build()
+
+        # Initialize the database
+        ctx = DatabaseContext(md, self.postgresql.engine)
+        ctx.initialize()
+        ctx.create_all()
+
+        # Get the names of the tables without the schema prepended
+        table_names = [name.split(".")[-1] for name in md.tables.keys()]
+
+        # Check that the tables and columns are created
+        with self.postgresql.begin() as conn:
+            res = conn.execute(text("SELECT table_name FROM information_schema.tables"))
+            tables = [row[0] for row in res.fetchall()]
+            for table_name in table_names:
+                self.assertIn(table_name, tables)
+                # Check that all columns are created
+                expected_columns = [col.name for col in md.tables[f"sales.{table_name}"].columns]
+                res = conn.execute(
+                    text("SELECT column_name FROM information_schema.columns WHERE table_name = :table_name"),
+                    {"table_name": table_name},
+                )
+                actual_columns = [row[0] for row in res.fetchall()]
+                self.assertSetEqual(set(expected_columns), set(actual_columns))
+
+        # Drop the schema
+        ctx.drop()
+
+        # Check that the "sales" schema was dropped
+        with self.postgresql.begin() as conn:
+            res = conn.execute(
+                text("SELECT schema_name FROM information_schema.schemata WHERE schema_name = 'sales'")
+            )
+            schemas = [row[0] for row in res.fetchall()]
+            self.assertNotIn("sales", schemas)
The remaining entries in the file list above (COPYRIGHT, LICENSE, pyproject.toml, setup.cfg, the egg-info metadata files, and the unchanged modules and tests) were renamed for the new version without content changes; their diffs are omitted here.