lsst-felis 28.2025.900-py3-none-any.whl → 29.2025.1000-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of lsst-felis might be problematic.

felis/__init__.py CHANGED
@@ -23,4 +23,11 @@ from .datamodel import Schema
 from .db.schema import create_database
 from .diff import DatabaseDiff, FormattedSchemaDiff, SchemaDiff
 from .metadata import MetaDataBuilder
-from .version import *
+
+from importlib.metadata import PackageNotFoundError, version
+
+try:
+    __version__ = version("lsst-felis")
+except PackageNotFoundError:
+    # Package not installed or scons not run.
+    __version__ = "0.0.0"
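
The static `felis/version.py` module (deleted below) is replaced by a runtime lookup through `importlib.metadata`, with a `"0.0.0"` fallback when the distribution metadata is missing. A quick way to see the effect (assuming the new wheel is installed in the current environment):

    import felis

    # Resolves from the installed distribution metadata at import time;
    # falls back to "0.0.0" if the package metadata cannot be found.
    print(felis.__version__)
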
felis/cli.py CHANGED
@@ -421,5 +421,37 @@ def diff(
         raise click.ClickException("Schema was changed")
 
 
+@cli.command(
+    "dump",
+    help="""
+    Dump a schema file to YAML or JSON format
+
+    Example:
+
+        felis dump schema.yaml schema.json
+
+        felis dump schema.yaml schema_dump.yaml
+    """,
+)
+@click.argument("files", nargs=2, type=click.Path())
+@click.pass_context
+def dump(
+    ctx: click.Context,
+    files: list[str],
+) -> None:
+    if files[1].endswith(".json"):
+        format = "json"
+    elif files[1].endswith(".yaml"):
+        format = "yaml"
+    else:
+        raise click.ClickException("Output file must have a .json or .yaml extension")
+    schema = Schema.from_uri(files[0], context={"id_generation": ctx.obj["id_generation"]})
+    with open(files[1], "w") as f:
+        if format == "yaml":
+            schema.dump_yaml(f)
+        elif format == "json":
+            schema.dump_json(f)
+
+
 if __name__ == "__main__":
     cli()
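
The new `dump` subcommand infers the output format from the second file's extension and writes the loaded schema back out with the new `dump_yaml`/`dump_json` methods. A minimal sketch of driving it programmatically with Click's test runner (the `schema.yaml` input is a hypothetical local file, and this assumes the top-level `cli` group populates `ctx.obj["id_generation"]` with its default, as the command expects):

    from click.testing import CliRunner

    from felis.cli import cli

    runner = CliRunner()
    # Convert a hypothetical schema.yaml into pretty-printed JSON.
    result = runner.invoke(cli, ["dump", "schema.yaml", "schema.json"])
    if result.exit_code != 0:
        print(result.output)
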
felis/datamodel.py CHANGED
@@ -23,16 +23,27 @@
 
 from __future__ import annotations
 
+import json
 import logging
+import sys
 from collections.abc import Sequence
 from enum import StrEnum, auto
-from typing import IO, Annotated, Any, Generic, Literal, TypeAlias, TypeVar, Union
+from typing import IO, Annotated, Any, Generic, Literal, TypeAlias, TypeVar
 
 import yaml
 from astropy import units as units  # type: ignore
 from astropy.io.votable import ucd  # type: ignore
 from lsst.resources import ResourcePath, ResourcePathExpression
-from pydantic import BaseModel, ConfigDict, Field, ValidationInfo, field_validator, model_validator
+from pydantic import (
+    BaseModel,
+    ConfigDict,
+    Field,
+    PrivateAttr,
+    ValidationInfo,
+    field_serializer,
+    field_validator,
+    model_validator,
+)
 
 from .db.dialects import get_supported_dialects
 from .db.sqltypes import get_type_func
@@ -43,9 +54,10 @@ logger = logging.getLogger(__name__)
 
 __all__ = (
     "BaseObject",
-    "Column",
     "CheckConstraint",
+    "Column",
     "Constraint",
+    "DataType",
     "ForeignKeyConstraint",
     "Index",
     "Schema",
@@ -58,6 +70,7 @@ CONFIG = ConfigDict(
    populate_by_name=True,  # Populate attributes by name.
    extra="forbid",  # Do not allow extra fields.
    str_strip_whitespace=True,  # Strip whitespace from string fields.
+    use_enum_values=False,  # Do not use enum values during serialization.
 )
 """Pydantic model configuration as described in:
 https://docs.pydantic.dev/2.0/api/config/#pydantic.config.ConfigDict
@@ -117,7 +130,7 @@ class BaseObject(BaseModel):
 
 
 class DataType(StrEnum):
-    """`Enum` representing the data types supported by Felis."""
+    """``Enum`` representing the data types supported by Felis."""
 
     boolean = auto()
     byte = auto()
@@ -185,12 +198,6 @@ class Column(BaseObject):
     autoincrement: bool | None = None
     """Whether the column is autoincremented."""
 
-    mysql_datatype: str | None = Field(None, alias="mysql:datatype")
-    """MySQL datatype override on the column."""
-
-    postgresql_datatype: str | None = Field(None, alias="postgresql:datatype")
-    """PostgreSQL datatype override on the column."""
-
     ivoa_ucd: str | None = Field(None, alias="ivoa:ucd")
     """IVOA UCD of the column."""
 
@@ -219,6 +226,12 @@ class Column(BaseObject):
     votable_datatype: str | None = Field(None, alias="votable:datatype")
     """VOTable datatype of the column."""
 
+    mysql_datatype: str | None = Field(None, alias="mysql:datatype")
+    """MySQL datatype override on the column."""
+
+    postgresql_datatype: str | None = Field(None, alias="postgresql:datatype")
+    """PostgreSQL datatype override on the column."""
+
     @model_validator(mode="after")
     def check_value(self) -> Column:
         """Check that the default value is valid.
@@ -458,6 +471,39 @@ class Column(BaseObject):
             values["votable:arraysize"] = str(arraysize)
         return values
 
+    @field_serializer("datatype")
+    def serialize_datatype(self, value: DataType) -> str:
+        """Convert `DataType` to string when serializing to JSON/YAML.
+
+        Parameters
+        ----------
+        value
+            The `DataType` value to serialize.
+
+        Returns
+        -------
+        `str`
+            The serialized `DataType` value.
+        """
+        return str(value)
+
+    @field_validator("datatype", mode="before")
+    @classmethod
+    def deserialize_datatype(cls, value: str) -> DataType:
+        """Convert string back into `DataType` when loading from JSON/YAML.
+
+        Parameters
+        ----------
+        value
+            The string value to deserialize.
+
+        Returns
+        -------
+        `DataType`
+            The deserialized `DataType` value.
+        """
+        return DataType(value)
+
 
 class Constraint(BaseObject):
     """Table constraint model."""
@@ -493,6 +539,22 @@ class CheckConstraint(Constraint):
     expression: str
     """Expression for the check constraint."""
 
+    @field_serializer("type")
+    def serialize_type(self, value: str) -> str:
+        """Ensure '@type' is included in serialized output.
+
+        Parameters
+        ----------
+        value
+            The value to serialize.
+
+        Returns
+        -------
+        `str`
+            The serialized value.
+        """
+        return value
+
 
 class UniqueConstraint(Constraint):
     """Table unique constraint model."""
@@ -503,6 +565,22 @@ class UniqueConstraint(Constraint):
     columns: list[str]
     """Columns in the unique constraint."""
 
+    @field_serializer("type")
+    def serialize_type(self, value: str) -> str:
+        """Ensure '@type' is included in serialized output.
+
+        Parameters
+        ----------
+        value
+            The value to serialize.
+
+        Returns
+        -------
+        `str`
+            The serialized value.
+        """
+        return value
+
 
 class ForeignKeyConstraint(Constraint):
     """Table foreign key constraint model.
@@ -525,6 +603,28 @@ class ForeignKeyConstraint(Constraint):
     referenced_columns: list[str] = Field(alias="referencedColumns")
     """The columns referenced by the foreign key."""
 
+    @field_serializer("type")
+    def serialize_type(self, value: str) -> str:
+        """Ensure '@type' is included in serialized output.
+
+        Parameters
+        ----------
+        value
+            The value to serialize.
+
+        Returns
+        -------
+        `str`
+            The serialized value.
+        """
+        return value
+
+
+_ConstraintType = Annotated[
+    CheckConstraint | ForeignKeyConstraint | UniqueConstraint, Field(discriminator="type")
+]
+"""Type alias for a constraint type."""
+
 
 class Index(BaseObject):
     """Table index model.
@@ -566,12 +666,6 @@ class Index(BaseObject):
         return values
 
 
-_ConstraintType = Annotated[
-    Union[CheckConstraint, ForeignKeyConstraint, UniqueConstraint], Field(discriminator="type")
-]
-"""Type alias for a constraint type."""
-
-
 ColumnRef: TypeAlias = str
 """Type alias for a column reference."""
 
@@ -585,7 +679,7 @@ class ColumnGroup(BaseObject):
     ivoa_ucd: str | None = Field(None, alias="ivoa:ucd")
     """IVOA UCD of the column."""
 
-    table: Table | None = None
+    table: Table | None = Field(None, exclude=True)
     """Reference to the parent table."""
 
     @field_validator("ivoa_ucd")
@@ -635,21 +729,25 @@ class ColumnGroup(BaseObject):
 
         self.columns = dereferenced_columns
 
+    @field_serializer("columns")
+    def serialize_columns(self, columns: list[ColumnRef | Column]) -> list[str]:
+        """Serialize columns as their IDs.
 
-class Table(BaseObject):
-    """Table model."""
-
-    columns: Sequence[Column]
-    """Columns in the table."""
+        Parameters
+        ----------
+        columns
+            The columns to serialize.
 
-    constraints: list[_ConstraintType] = Field(default_factory=list)
-    """Constraints on the table."""
+        Returns
+        -------
+        `list` [ `str` ]
+            The serialized column IDs.
+        """
+        return [col if isinstance(col, str) else col.id for col in columns]
 
-    indexes: list[Index] = Field(default_factory=list)
-    """Indexes on the table."""
 
-    column_groups: list[ColumnGroup] = Field(default_factory=list, alias="columnGroups")
-    """Column groups in the table."""
+class Table(BaseObject):
+    """Table model."""
 
     primary_key: str | list[str] | None = Field(None, alias="primaryKey")
     """Primary key of the table."""
@@ -663,6 +761,18 @@ class Table(BaseObject):
     mysql_charset: str | None = Field(None, alias="mysql:charset")
     """MySQL charset to use for the table."""
 
+    columns: Sequence[Column]
+    """Columns in the table."""
+
+    column_groups: list[ColumnGroup] = Field(default_factory=list, alias="columnGroups")
+    """Column groups in the table."""
+
+    constraints: list[_ConstraintType] = Field(default_factory=list)
+    """Constraints on the table."""
+
+    indexes: list[Index] = Field(default_factory=list)
+    """Indexes on the table."""
+
     @field_validator("columns", mode="after")
     @classmethod
     def check_unique_column_names(cls, columns: list[Column]) -> list[Column]:
@@ -821,10 +931,10 @@ class SchemaIdVisitor:
         if hasattr(obj, "id"):
             obj_id = getattr(obj, "id")
             if self.schema is not None:
-                if obj_id in self.schema.id_map:
+                if obj_id in self.schema._id_map:
                     self.duplicates.add(obj_id)
                 else:
-                    self.schema.id_map[obj_id] = obj
+                    self.schema._id_map[obj_id] = obj
 
     def visit_schema(self, schema: Schema) -> None:
         """Visit the objects in a schema and build the ID map.
@@ -894,7 +1004,7 @@ class Schema(BaseObject, Generic[T]):
     tables: Sequence[Table]
     """The tables in the schema."""
 
-    id_map: dict[str, Any] = Field(default_factory=dict, exclude=True)
+    _id_map: dict[str, Any] = PrivateAttr(default_factory=dict)
     """Map of IDs to objects."""
 
     @model_validator(mode="before")
  @model_validator(mode="before")
@@ -932,6 +1042,14 @@ class Schema(BaseObject, Generic[T]):
932
1042
  if "@id" not in column:
933
1043
  column["@id"] = f"#{table['name']}.{column['name']}"
934
1044
  logger.debug(f"Generated ID '{column['@id']}' for column '{column['name']}'")
1045
+ if "columnGroups" in table:
1046
+ for column_group in table["columnGroups"]:
1047
+ if "@id" not in column_group:
1048
+ column_group["@id"] = f"#{table['name']}.{column_group['name']}"
1049
+ logger.debug(
1050
+ f"Generated ID '{column_group['@id']}' for column group "
1051
+ f"'{column_group['name']}'"
1052
+ )
935
1053
  if "constraints" in table:
936
1054
  for constraint in table["constraints"]:
937
1055
  if "@id" not in constraint:
@@ -1069,7 +1187,7 @@ class Schema(BaseObject, Generic[T]):
         This is called automatically by the `model_post_init` method. If the
         ID map is already populated, this method will return immediately.
         """
-        if len(self.id_map):
+        if self._id_map:
             logger.debug("Ignoring call to create_id_map() - ID map was already populated")
             return self
         visitor: SchemaIdVisitor = SchemaIdVisitor()
@@ -1113,7 +1231,7 @@ class Schema(BaseObject, Generic[T]):
         """
         if id not in self:
             raise KeyError(f"Object with ID '{id}' not found in schema")
-        return self.id_map[id]
+        return self._id_map[id]
 
     def __contains__(self, id: str) -> bool:
         """Check if an object with the given ID is in the schema.
@@ -1123,7 +1241,7 @@ class Schema(BaseObject, Generic[T]):
         id
             The ID of the object to check.
         """
-        return id in self.id_map
+        return id in self._id_map
 
     def find_object_by_id(self, id: str, obj_type: type[T]) -> T:
         """Find an object with the given type by its ID.
@@ -1239,3 +1357,33 @@ class Schema(BaseObject, Generic[T]):
         logger.debug("Loading schema from: '%s'", source)
         yaml_data = yaml.safe_load(source)
         return Schema.model_validate(yaml_data, context=context)
+
+    def dump_yaml(self, stream: IO[str] = sys.stdout) -> None:
+        """Pretty print the schema as YAML.
+
+        Parameters
+        ----------
+        stream
+            The stream to write the YAML data to.
+        """
+        yaml.safe_dump(
+            self.model_dump(by_alias=True, exclude_none=True, exclude_defaults=True),
+            stream,
+            default_flow_style=False,
+            sort_keys=False,
+        )
+
+    def dump_json(self, stream: IO[str] = sys.stdout) -> None:
+        """Pretty print the schema as JSON.
+
+        Parameters
+        ----------
+        stream
+            The stream to write the JSON data to.
+        """
+        json.dump(
+            self.model_dump(by_alias=True, exclude_none=True, exclude_defaults=True),
+            stream,
+            indent=4,
+            sort_keys=False,
+        )
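
The `serialize_datatype`/`deserialize_datatype` pair added to `Column` is the standard Pydantic v2 pattern for round-tripping an enum-valued field as a plain string. A self-contained sketch of the same idea, using a hypothetical `Color` enum rather than Felis's `DataType`:

    from enum import StrEnum, auto

    from pydantic import BaseModel, field_serializer, field_validator


    class Color(StrEnum):
        red = auto()
        blue = auto()


    class Widget(BaseModel):
        color: Color

        @field_serializer("color")
        def serialize_color(self, value: Color) -> str:
            # Emit a plain string so YAML/JSON dumps stay clean.
            return str(value)

        @field_validator("color", mode="before")
        @classmethod
        def deserialize_color(cls, value: str) -> Color:
            # Accept the plain string back when re-loading dumped data.
            return Color(value)


    w = Widget(color="red")
    assert w.model_dump() == {"color": "red"}
    assert Widget.model_validate({"color": "red"}).color is Color.red

Taken together, the datamodel changes let a validated schema be written back out through the new dump methods. A minimal round-trip sketch (assumes a hypothetical local `schema.yaml` file; the `id_generation` context key mirrors what the CLI passes):

    import sys

    from felis.datamodel import Schema

    # Load and validate a schema file, generating missing IDs.
    schema = Schema.from_uri("schema.yaml", context={"id_generation": True})

    # Pretty-print as YAML to stdout, or as JSON to a file.
    schema.dump_yaml(sys.stdout)
    with open("schema.json", "w") as f:
        schema.dump_json(f)
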
felis/db/dialects.py CHANGED
@@ -32,7 +32,7 @@ from sqlalchemy.engine.mock import create_mock_engine
 
 from .sqltypes import MYSQL, POSTGRES, SQLITE
 
-__all__ = ["get_supported_dialects", "get_dialect_module"]
+__all__ = ["get_dialect_module", "get_supported_dialects"]
 
 _DIALECT_NAMES = (MYSQL, POSTGRES, SQLITE)
 """List of supported dialect names.
felis/db/sqltypes.py CHANGED
@@ -32,20 +32,20 @@ from sqlalchemy.dialects import mysql, postgresql
 from sqlalchemy.ext.compiler import compiles
 
 __all__ = [
+    "binary",
     "boolean",
     "byte",
-    "short",
+    "char",
+    "double",
+    "float",
+    "get_type_func",
     "int",
     "long",
-    "float",
-    "double",
-    "char",
+    "short",
     "string",
-    "unicode",
     "text",
-    "binary",
     "timestamp",
-    "get_type_func",
+    "unicode",
 ]
 
 MYSQL = "mysql"
felis/db/utils.py CHANGED
@@ -38,7 +38,7 @@ from sqlalchemy.types import TypeEngine
 
 from .dialects import get_dialect_module
 
-__all__ = ["string_to_typeengine", "SQLWriter", "ConnectionWrapper", "DatabaseContext"]
+__all__ = ["ConnectionWrapper", "DatabaseContext", "SQLWriter", "string_to_typeengine"]
 
 logger = logging.getLogger("felis")
 
felis/diff.py CHANGED
@@ -35,7 +35,7 @@ from sqlalchemy import Engine, MetaData
 from .datamodel import Schema
 from .metadata import MetaDataBuilder
 
-__all__ = ["SchemaDiff", "DatabaseDiff"]
+__all__ = ["DatabaseDiff", "SchemaDiff"]
 
 logger = logging.getLogger(__name__)
 
felis/tap_schema.py CHANGED
@@ -41,7 +41,7 @@ from felis.metadata import MetaDataBuilder
 
 from .types import FelisType
 
-__all__ = ["TableManager", "DataLoader"]
+__all__ = ["DataLoader", "TableManager"]
 
 logger = logging.getLogger(__name__)
 
felis/types.py CHANGED
@@ -26,20 +26,20 @@ from __future__ import annotations
 from typing import Any
 
 __all__ = [
-    "FelisType",
+    "Binary",
     "Boolean",
     "Byte",
-    "Short",
+    "Char",
+    "Double",
+    "FelisType",
+    "Float",
     "Int",
     "Long",
-    "Float",
-    "Double",
-    "Char",
+    "Short",
     "String",
-    "Unicode",
     "Text",
-    "Binary",
     "Timestamp",
+    "Unicode",
 ]
 
 
@@ -1,6 +1,6 @@
 Metadata-Version: 2.2
 Name: lsst-felis
-Version: 28.2025.900
+Version: 29.2025.1000
 Summary: A vocabulary for describing catalogs and acting on those descriptions
 Author-email: Rubin Observatory Data Management <dm-admin@lists.lsst.org>
 License: GNU General Public License v3 or later (GPLv3+)
@@ -0,0 +1,25 @@
+felis/__init__.py,sha256=HnwWzLaPOSnPzAoppSIHzTrGfixEgvkzJdBxa8-03cw,1294
+felis/cli.py,sha256=Ln2Not-hnquqay3pPpiBSAc8P2nSoyYu2WI_3BBPQs0,15278
+felis/datamodel.py,sha256=VC11uKxSumksen_3zyCkH9l9A_BcTjMib341fLyOLMU,43068
+felis/diff.py,sha256=Vs4JuNwHmm7FCXuhIWMNHB7BTQk0zCPq6tUXQq66Otw,7492
+felis/metadata.py,sha256=cYx_qizkLBqcoxWV46h4TbwTi1KVJAkuA2OuUmD-K5k,13536
+felis/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+felis/tap_schema.py,sha256=Hz092tOOHbAvESWL85dNWp1O5ovJeacmJTn2vxB-wh0,22753
+felis/types.py,sha256=ifZQjc-Uw5CM3L7hmFUb7wcHY1O_HgJCw6HPqyUkHvk,5510
+felis/db/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+felis/db/dialects.py,sha256=XTZEbTnfy96GJDNRLCQMbAV6irerC87vhO_HyTIXLbs,3517
+felis/db/schema.py,sha256=NOFXzBoBQcgpoRlgT3LoC70FKp7pCSmFEJ7rU8FIT-c,2101
+felis/db/sqltypes.py,sha256=Q2p3Af3O5-B1ZxQ4M2j_w8SH1o_kp6ezg8h7LmSlfww,11060
+felis/db/utils.py,sha256=jiKQ_SirKRdQITHe8gSiT_i3ckRHZbkAnwUlEHk2u4Y,14116
+felis/db/variants.py,sha256=eahthrbVeV8ZdGamWQccNmWgx6CCscGrU0vQRs5HZK8,5260
+felis/schemas/tap_schema_std.yaml,sha256=sPW-Vk72nY0PFpCvP5d8L8fWvhkif-x32sGtcfDZ8bU,7131
+felis/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+felis/tests/postgresql.py,sha256=B_xk4fLual5-viGDqP20r94okuc0pbSvytRH_L0fvMs,4035
+lsst_felis-29.2025.1000.dist-info/COPYRIGHT,sha256=vJAFLFTSF1mhy9eIuA3P6R-3yxTWKQgpig88P-1IzRw,129
+lsst_felis-29.2025.1000.dist-info/LICENSE,sha256=jOtLnuWt7d5Hsx6XXB2QxzrSe2sWWh3NgMfFRetluQM,35147
+lsst_felis-29.2025.1000.dist-info/METADATA,sha256=uAXaFTJqCpCWULPhZCm3q6f2aeLvzJZ3E2IzJanK_Us,1411
+lsst_felis-29.2025.1000.dist-info/WHEEL,sha256=jB7zZ3N9hIM9adW7qlTAyycLYW9npaWKLRzaoVcLKcM,91
+lsst_felis-29.2025.1000.dist-info/entry_points.txt,sha256=Gk2XFujA_Gp52VBk45g5kim8TDoMDJFPctsMqiq72EM,40
+lsst_felis-29.2025.1000.dist-info/top_level.txt,sha256=F4SvPip3iZRVyISi50CHhwTIAokAhSxjWiVcn4IVWRI,6
+lsst_felis-29.2025.1000.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
+lsst_felis-29.2025.1000.dist-info/RECORD,,
felis/version.py DELETED
@@ -1,2 +0,0 @@
-__all__ = ["__version__"]
-__version__ = "28.2025.900"
@@ -1,26 +0,0 @@
-felis/__init__.py,sha256=r1KFSnc55gziwUuYb9s2EfwrI_85aa3LpaKwk6rUvvs,1108
-felis/cli.py,sha256=Wf-sEUZ-B9zzn4M1huY2ruV1nkgVmpzX8f8iuFfyxZc,14469
-felis/datamodel.py,sha256=NczAA4HBBC4-uxPNsrKAFX-hdlgvCT2qqEJCEqDy4yg,39265
-felis/diff.py,sha256=0N4OcBCzbL9DW_XGAeuvGsQ0zIhq8fY-Kx2QdvLv-Ds,7492
-felis/metadata.py,sha256=cYx_qizkLBqcoxWV46h4TbwTi1KVJAkuA2OuUmD-K5k,13536
-felis/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-felis/tap_schema.py,sha256=DgHH4hBf4q_F540TAR9GTKcALwUkk8iTw5pzQlmv1DA,22753
-felis/types.py,sha256=m80GSGfNHQ3-NzRuTzKOyRXLJboPxdk9kzpp1SO8XdY,5510
-felis/version.py,sha256=SMLiEYE3TidCx-PaKCULel49al2Cp64_dNg6h9nCkUc,54
-felis/db/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-felis/db/dialects.py,sha256=n5La-shu-8fHLIyf8rrazHDyrzATmMCdELtKV_0ymxI,3517
-felis/db/schema.py,sha256=NOFXzBoBQcgpoRlgT3LoC70FKp7pCSmFEJ7rU8FIT-c,2101
-felis/db/sqltypes.py,sha256=JJy97U8KzAOg5pFi2xZgSjvU8CXXgrzkvCsmo6FLRG4,11060
-felis/db/utils.py,sha256=SIl2ryOT2Zn5n0BqdNDxC1HcOoxh0doaKk_hMUGvwAc,14116
-felis/db/variants.py,sha256=eahthrbVeV8ZdGamWQccNmWgx6CCscGrU0vQRs5HZK8,5260
-felis/schemas/tap_schema_std.yaml,sha256=sPW-Vk72nY0PFpCvP5d8L8fWvhkif-x32sGtcfDZ8bU,7131
-felis/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-felis/tests/postgresql.py,sha256=B_xk4fLual5-viGDqP20r94okuc0pbSvytRH_L0fvMs,4035
-lsst_felis-28.2025.900.dist-info/COPYRIGHT,sha256=vJAFLFTSF1mhy9eIuA3P6R-3yxTWKQgpig88P-1IzRw,129
-lsst_felis-28.2025.900.dist-info/LICENSE,sha256=jOtLnuWt7d5Hsx6XXB2QxzrSe2sWWh3NgMfFRetluQM,35147
-lsst_felis-28.2025.900.dist-info/METADATA,sha256=vXu2ryU4MCMsZhNxs0_K_ZSjdIhJ_l0EgHn9H4qhef8,1410
-lsst_felis-28.2025.900.dist-info/WHEEL,sha256=jB7zZ3N9hIM9adW7qlTAyycLYW9npaWKLRzaoVcLKcM,91
-lsst_felis-28.2025.900.dist-info/entry_points.txt,sha256=Gk2XFujA_Gp52VBk45g5kim8TDoMDJFPctsMqiq72EM,40
-lsst_felis-28.2025.900.dist-info/top_level.txt,sha256=F4SvPip3iZRVyISi50CHhwTIAokAhSxjWiVcn4IVWRI,6
-lsst_felis-28.2025.900.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
-lsst_felis-28.2025.900.dist-info/RECORD,,