lsst-felis 26.2024.1500__py3-none-any.whl → 26.2024.1700__py3-none-any.whl

This diff shows the changes between publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the package contents as they appear in their respective public registries.

Potentially problematic release.


This version of lsst-felis might be problematic. Click here for more details.

felis/cli.py CHANGED
@@ -183,6 +183,7 @@ def init_tap(
183
183
  @click.option("--tap-columns-table", help="Alt Table Name for TAP_SCHEMA.columns")
184
184
  @click.option("--tap-keys-table", help="Alt Table Name for TAP_SCHEMA.keys")
185
185
  @click.option("--tap-key-columns-table", help="Alt Table Name for TAP_SCHEMA.key_columns")
186
+ @click.option("--tap-schema-index", type=int, help="TAP_SCHEMA index of the schema")
186
187
  @click.argument("file", type=click.File())
187
188
  def load_tap(
188
189
  engine_url: str,
@@ -196,6 +197,7 @@ def load_tap(
196
197
  tap_columns_table: str,
197
198
  tap_keys_table: str,
198
199
  tap_key_columns_table: str,
200
+ tap_schema_index: int,
199
201
  file: io.TextIOBase,
200
202
  ) -> None:
201
203
  """Load TAP metadata from a Felis FILE.
@@ -203,28 +205,8 @@ def load_tap(
203
205
  This command loads the associated TAP metadata from a Felis FILE
204
206
  to the TAP_SCHEMA tables.
205
207
  """
206
- top_level_object = yaml.load(file, Loader=yaml.SafeLoader)
207
- schema_obj: dict
208
- if isinstance(top_level_object, dict):
209
- schema_obj = top_level_object
210
- if "@graph" not in schema_obj:
211
- schema_obj["@type"] = "felis:Schema"
212
- schema_obj["@context"] = DEFAULT_CONTEXT
213
- elif isinstance(top_level_object, list):
214
- schema_obj = {"@context": DEFAULT_CONTEXT, "@graph": top_level_object}
215
- else:
216
- logger.error("Schema object not of recognizable type")
217
- raise click.exceptions.Exit(1)
218
-
219
- normalized = _normalize(schema_obj, embed="@always")
220
- if len(normalized["@graph"]) > 1 and (schema_name or catalog_name):
221
- logger.error("--schema-name and --catalog-name incompatible with multiple schemas")
222
- raise click.exceptions.Exit(1)
223
-
224
- # Force normalized["@graph"] to a list, which is what happens when there's
225
- # multiple schemas
226
- if isinstance(normalized["@graph"], dict):
227
- normalized["@graph"] = [normalized["@graph"]]
208
+ yaml_data = yaml.load(file, Loader=yaml.SafeLoader)
209
+ schema = Schema.model_validate(yaml_data)
228
210
 
229
211
  tap_tables = init_tables(
230
212
  tap_schema_name,
@@ -243,28 +225,28 @@ def load_tap(
243
225
  # In Memory SQLite - Mostly used to test
244
226
  Tap11Base.metadata.create_all(engine)
245
227
 
246
- for schema in normalized["@graph"]:
247
- tap_visitor = TapLoadingVisitor(
248
- engine,
249
- catalog_name=catalog_name,
250
- schema_name=schema_name,
251
- tap_tables=tap_tables,
252
- )
253
- tap_visitor.visit_schema(schema)
228
+ tap_visitor = TapLoadingVisitor(
229
+ engine,
230
+ catalog_name=catalog_name,
231
+ schema_name=schema_name,
232
+ tap_tables=tap_tables,
233
+ tap_schema_index=tap_schema_index,
234
+ )
235
+ tap_visitor.visit_schema(schema)
254
236
  else:
255
237
  _insert_dump = InsertDump()
256
238
  conn = create_mock_engine(make_url(engine_url), executor=_insert_dump.dump, paramstyle="pyformat")
257
239
  # After the engine is created, update the executor with the dialect
258
240
  _insert_dump.dialect = conn.dialect
259
241
 
260
- for schema in normalized["@graph"]:
261
- tap_visitor = TapLoadingVisitor.from_mock_connection(
262
- conn,
263
- catalog_name=catalog_name,
264
- schema_name=schema_name,
265
- tap_tables=tap_tables,
266
- )
267
- tap_visitor.visit_schema(schema)
242
+ tap_visitor = TapLoadingVisitor.from_mock_connection(
243
+ conn,
244
+ catalog_name=catalog_name,
245
+ schema_name=schema_name,
246
+ tap_tables=tap_tables,
247
+ tap_schema_index=tap_schema_index,
248
+ )
249
+ tap_visitor.visit_schema(schema)
268
250
 
269
251
 
270
252
  @cli.command("modify-tap")
@@ -373,22 +355,37 @@ def merge(files: Iterable[io.TextIOBase]) -> None:
373
355
  type=click.Choice(["RSP", "default"]),
374
356
  default="default",
375
357
  )
376
- @click.option("-d", "--require-description", is_flag=True, help="Require description for all objects")
358
+ @click.option(
359
+ "-d", "--require-description", is_flag=True, help="Require description for all objects", default=False
360
+ )
361
+ @click.option(
362
+ "-t", "--check-redundant-datatypes", is_flag=True, help="Check for redundant datatypes", default=False
363
+ )
377
364
  @click.argument("files", nargs=-1, type=click.File())
378
- def validate(schema_name: str, require_description: bool, files: Iterable[io.TextIOBase]) -> None:
365
+ def validate(
366
+ schema_name: str,
367
+ require_description: bool,
368
+ check_redundant_datatypes: bool,
369
+ files: Iterable[io.TextIOBase],
370
+ ) -> None:
379
371
  """Validate one or more felis YAML files."""
380
372
  schema_class = get_schema(schema_name)
381
- logger.info(f"Using schema '{schema_class.__name__}'")
382
-
383
- if require_description:
384
- Schema.require_description(True)
373
+ if schema_name != "default":
374
+ logger.info(f"Using schema '{schema_class.__name__}'")
385
375
 
386
376
  rc = 0
387
377
  for file in files:
388
378
  file_name = getattr(file, "name", None)
389
379
  logger.info(f"Validating {file_name}")
390
380
  try:
391
- schema_class.model_validate(yaml.load(file, Loader=yaml.SafeLoader))
381
+ data = yaml.load(file, Loader=yaml.SafeLoader)
382
+ schema_class.model_validate(
383
+ data,
384
+ context={
385
+ "check_redundant_datatypes": check_redundant_datatypes,
386
+ "require_description": require_description,
387
+ },
388
+ )
392
389
  except ValidationError as e:
393
390
  logger.error(e)
394
391
  rc = 1
felis/datamodel.py CHANGED
@@ -22,13 +22,22 @@
22
22
  from __future__ import annotations
23
23
 
24
24
  import logging
25
+ import re
25
26
  from collections.abc import Mapping, Sequence
26
- from enum import Enum
27
+ from enum import StrEnum, auto
27
28
  from typing import Annotated, Any, Literal, TypeAlias
28
29
 
29
30
  from astropy import units as units # type: ignore
30
31
  from astropy.io.votable import ucd # type: ignore
31
- from pydantic import BaseModel, ConfigDict, Field, field_validator, model_validator
32
+ from pydantic import BaseModel, ConfigDict, Field, ValidationInfo, field_validator, model_validator
33
+ from sqlalchemy import dialects
34
+ from sqlalchemy import types as sqa_types
35
+ from sqlalchemy.engine import create_mock_engine
36
+ from sqlalchemy.engine.interfaces import Dialect
37
+ from sqlalchemy.types import TypeEngine
38
+
39
+ from .db.sqltypes import get_type_func
40
+ from .types import FelisType
32
41
 
33
42
  logger = logging.getLogger(__name__)
34
43
 
@@ -49,7 +58,6 @@ __all__ = (
49
58
  CONFIG = ConfigDict(
50
59
  populate_by_name=True, # Populate attributes by name.
51
60
  extra="forbid", # Do not allow extra fields.
52
- validate_assignment=True, # Validate assignments after model is created.
53
61
  str_strip_whitespace=True, # Strip whitespace from string fields.
54
62
  )
55
63
  """Pydantic model configuration as described in:
@@ -83,40 +91,85 @@ class BaseObject(BaseModel):
83
91
  """
84
92
 
85
93
  description: DescriptionStr | None = None
86
- """A description of the database object.
94
+ """A description of the database object."""
87
95
 
88
- By default, the description is optional but will be required if
89
- `BaseObject.Config.require_description` is set to `True` by the user.
90
- """
96
+ votable_utype: str | None = Field(None, alias="votable:utype")
97
+ """The VOTable utype (usage-specific or unique type) of the object."""
91
98
 
92
- @model_validator(mode="before")
93
- @classmethod
94
- def check_description(cls, values: dict[str, Any]) -> dict[str, Any]:
99
+ @model_validator(mode="after")
100
+ def check_description(self, info: ValidationInfo) -> BaseObject:
95
101
  """Check that the description is present if required."""
96
- if Schema.is_description_required():
97
- if "description" not in values or not values["description"]:
98
- raise ValueError("Description is required and must be non-empty")
99
- if len(values["description"].strip()) < DESCR_MIN_LENGTH:
100
- raise ValueError(f"Description must be at least {DESCR_MIN_LENGTH} characters long")
101
- return values
102
+ context = info.context
103
+ if not context or not context.get("require_description", False):
104
+ return self
105
+ if self.description is None or self.description == "":
106
+ raise ValueError("Description is required and must be non-empty")
107
+ if len(self.description) < DESCR_MIN_LENGTH:
108
+ raise ValueError(f"Description must be at least {DESCR_MIN_LENGTH} characters long")
109
+ return self
102
110
 
103
111
 
104
- class DataType(Enum):
112
+ class DataType(StrEnum):
105
113
  """`Enum` representing the data types supported by Felis."""
106
114
 
107
- BOOLEAN = "boolean"
108
- BYTE = "byte"
109
- SHORT = "short"
110
- INT = "int"
111
- LONG = "long"
112
- FLOAT = "float"
113
- DOUBLE = "double"
114
- CHAR = "char"
115
- STRING = "string"
116
- UNICODE = "unicode"
117
- TEXT = "text"
118
- BINARY = "binary"
119
- TIMESTAMP = "timestamp"
115
+ boolean = auto()
116
+ byte = auto()
117
+ short = auto()
118
+ int = auto()
119
+ long = auto()
120
+ float = auto()
121
+ double = auto()
122
+ char = auto()
123
+ string = auto()
124
+ unicode = auto()
125
+ text = auto()
126
+ binary = auto()
127
+ timestamp = auto()
128
+
129
+
130
+ _DIALECTS = {
131
+ "mysql": create_mock_engine("mysql://", executor=None).dialect,
132
+ "postgresql": create_mock_engine("postgresql://", executor=None).dialect,
133
+ }
134
+ """Dictionary of dialect names to SQLAlchemy dialects."""
135
+
136
+ _DIALECT_MODULES = {"mysql": getattr(dialects, "mysql"), "postgresql": getattr(dialects, "postgresql")}
137
+ """Dictionary of dialect names to SQLAlchemy dialect modules."""
138
+
139
+ _DATATYPE_REGEXP = re.compile(r"(\w+)(\((.*)\))?")
140
+ """Regular expression to match data types in the form "type(length)"""
141
+
142
+
143
+ def string_to_typeengine(
144
+ type_string: str, dialect: Dialect | None = None, length: int | None = None
145
+ ) -> TypeEngine:
146
+ match = _DATATYPE_REGEXP.search(type_string)
147
+ if not match:
148
+ raise ValueError(f"Invalid type string: {type_string}")
149
+
150
+ type_name, _, params = match.groups()
151
+ if dialect is None:
152
+ type_class = getattr(sqa_types, type_name.upper(), None)
153
+ else:
154
+ try:
155
+ dialect_module = _DIALECT_MODULES[dialect.name]
156
+ except KeyError:
157
+ raise ValueError(f"Unsupported dialect: {dialect}")
158
+ type_class = getattr(dialect_module, type_name.upper(), None)
159
+
160
+ if not type_class:
161
+ raise ValueError(f"Unsupported type: {type_class}")
162
+
163
+ if params:
164
+ params = [int(param) if param.isdigit() else param for param in params.split(",")]
165
+ type_obj = type_class(*params)
166
+ else:
167
+ type_obj = type_class()
168
+
169
+ if hasattr(type_obj, "length") and getattr(type_obj, "length") is None and length is not None:
170
+ type_obj.length = length
171
+
172
+ return type_obj
120
173
 
121
174
 
122
175
  class Column(BaseObject):
@@ -128,13 +181,8 @@ class Column(BaseObject):
128
181
  length: int | None = None
129
182
  """The length of the column."""
130
183
 
131
- nullable: bool | None = None
132
- """Whether the column can be ``NULL``.
133
-
134
- If `None`, this value was not set explicitly in the YAML data. In this
135
- case, it will be set to `False` for columns with numeric types and `True`
136
- otherwise.
137
- """
184
+ nullable: bool = True
185
+ """Whether the column can be ``NULL``."""
138
186
 
139
187
  value: Any = None
140
188
  """The default value of the column."""
@@ -171,12 +219,12 @@ class Column(BaseObject):
171
219
  """TAP_SCHEMA indication that this column is defined by an IVOA standard.
172
220
  """
173
221
 
174
- votable_utype: str | None = Field(None, alias="votable:utype")
175
- """The VOTable utype (usage-specific or unique type) of the column."""
176
-
177
222
  votable_xtype: str | None = Field(None, alias="votable:xtype")
178
223
  """The VOTable xtype (extended type) of the column."""
179
224
 
225
+ votable_datatype: str | None = Field(None, alias="votable:datatype")
226
+ """The VOTable datatype of the column."""
227
+
180
228
  @field_validator("ivoa_ucd")
181
229
  @classmethod
182
230
  def check_ivoa_ucd(cls, ivoa_ucd: str) -> str:
@@ -207,6 +255,57 @@ class Column(BaseObject):
207
255
 
208
256
  return values
209
257
 
258
+ @model_validator(mode="after") # type: ignore[arg-type]
259
+ @classmethod
260
+ def validate_datatypes(cls, col: Column, info: ValidationInfo) -> Column:
261
+ """Check for redundant datatypes on columns."""
262
+ context = info.context
263
+ if not context or not context.get("check_redundant_datatypes", False):
264
+ return col
265
+ if all(getattr(col, f"{dialect}:datatype", None) is not None for dialect in _DIALECTS.keys()):
266
+ return col
267
+
268
+ datatype = col.datatype
269
+ length: int | None = col.length or None
270
+
271
+ datatype_func = get_type_func(datatype)
272
+ felis_type = FelisType.felis_type(datatype)
273
+ if felis_type.is_sized:
274
+ if length is not None:
275
+ datatype_obj = datatype_func(length)
276
+ else:
277
+ raise ValueError(f"Length must be provided for sized type '{datatype}' in column '{col.id}'")
278
+ else:
279
+ datatype_obj = datatype_func()
280
+
281
+ for dialect_name, dialect in _DIALECTS.items():
282
+ db_annotation = f"{dialect_name}_datatype"
283
+ if datatype_string := col.model_dump().get(db_annotation):
284
+ db_datatype_obj = string_to_typeengine(datatype_string, dialect, length)
285
+ if datatype_obj.compile(dialect) == db_datatype_obj.compile(dialect):
286
+ raise ValueError(
287
+ "'{}: {}' is a redundant override of 'datatype: {}' in column '{}'{}".format(
288
+ db_annotation,
289
+ datatype_string,
290
+ col.datatype,
291
+ col.id,
292
+ "" if length is None else f" with length {length}",
293
+ )
294
+ )
295
+ else:
296
+ logger.debug(
297
+ "Type override of 'datatype: {}' with '{}: {}' in column '{}' "
298
+ "compiled to '{}' and '{}'".format(
299
+ col.datatype,
300
+ db_annotation,
301
+ datatype_string,
302
+ col.id,
303
+ datatype_obj.compile(dialect),
304
+ db_datatype_obj.compile(dialect),
305
+ )
306
+ )
307
+ return col
308
+
210
309
 
211
310
  class Constraint(BaseObject):
212
311
  """A database table constraint."""
@@ -404,15 +503,6 @@ class SchemaIdVisitor:
404
503
  class Schema(BaseObject):
405
504
  """The database schema containing the tables."""
406
505
 
407
- class ValidationConfig:
408
- """Validation configuration which is specific to Felis."""
409
-
410
- _require_description = False
411
- """Flag to require a description for all objects.
412
-
413
- This is set by the `require_description` class method.
414
- """
415
-
416
506
  version: SchemaVersion | str | None = None
417
507
  """The version of the schema."""
418
508
 
@@ -430,21 +520,29 @@ class Schema(BaseObject):
430
520
  raise ValueError("Table names must be unique")
431
521
  return tables
432
522
 
433
- @model_validator(mode="after")
434
- def create_id_map(self: Schema) -> Schema:
435
- """Create a map of IDs to objects."""
523
+ def _create_id_map(self: Schema) -> Schema:
524
+ """Create a map of IDs to objects.
525
+
526
+ This method should not be called by users. It is called automatically
527
+ by the ``model_post_init()`` method. If the ID map is already
528
+ populated, this method will return immediately.
529
+ """
436
530
  if len(self.id_map):
437
- logger.debug("ID map was already populated")
531
+ logger.debug("Ignoring call to create_id_map() - ID map was already populated")
438
532
  return self
439
533
  visitor: SchemaIdVisitor = SchemaIdVisitor()
440
534
  visitor.visit_schema(self)
441
- logger.debug(f"ID map contains {len(self.id_map.keys())} objects")
535
+ logger.debug(f"Created schema ID map with {len(self.id_map.keys())} objects")
442
536
  if len(visitor.duplicates):
443
537
  raise ValueError(
444
538
  "Duplicate IDs found in schema:\n " + "\n ".join(visitor.duplicates) + "\n"
445
539
  )
446
540
  return self
447
541
 
542
+ def model_post_init(self, ctx: Any) -> None:
543
+ """Post-initialization hook for the model."""
544
+ self._create_id_map()
545
+
448
546
  def __getitem__(self, id: str) -> BaseObject:
449
547
  """Get an object by its ID."""
450
548
  if id not in self:
@@ -454,20 +552,3 @@ class Schema(BaseObject):
454
552
  def __contains__(self, id: str) -> bool:
455
553
  """Check if an object with the given ID is in the schema."""
456
554
  return id in self.id_map
457
-
458
- @classmethod
459
- def require_description(cls, rd: bool = True) -> None:
460
- """Set whether a description is required for all objects.
461
-
462
- This includes the schema, tables, columns, and constraints.
463
-
464
- Users should call this method to set the requirement for a description
465
- when validating schemas, rather than change the flag value directly.
466
- """
467
- logger.debug(f"Setting description requirement to '{rd}'")
468
- cls.ValidationConfig._require_description = rd
469
-
470
- @classmethod
471
- def is_description_required(cls) -> bool:
472
- """Return whether a description is required for all objects."""
473
- return cls.ValidationConfig._require_description
felis/db/_variants.py CHANGED
@@ -40,10 +40,10 @@ TABLE_OPTS = {
40
40
  }
41
41
 
42
42
  COLUMN_VARIANT_OVERRIDE = {
43
- "mysql:datatype": "mysql",
44
- "oracle:datatype": "oracle",
45
- "postgresql:datatype": "postgresql",
46
- "sqlite:datatype": "sqlite",
43
+ "mysql_datatype": "mysql",
44
+ "oracle_datatype": "oracle",
45
+ "postgresql_datatype": "postgresql",
46
+ "sqlite_datatype": "sqlite",
47
47
  }
48
48
 
49
49
  DIALECT_MODULES = {MYSQL: mysql, ORACLE: oracle, SQLITE: sqlite, POSTGRES: postgresql}
@@ -87,7 +87,7 @@ def make_variant_dict(column_obj: Column) -> dict[str, TypeEngine[Any]]:
87
87
  """
88
88
  variant_dict = {}
89
89
  for field_name, value in iter(column_obj):
90
- if field_name in COLUMN_VARIANT_OVERRIDE:
90
+ if field_name in COLUMN_VARIANT_OVERRIDE and value is not None:
91
91
  dialect = COLUMN_VARIANT_OVERRIDE[field_name]
92
92
  variant: TypeEngine = process_variant_override(dialect, value)
93
93
  variant_dict[dialect] = variant
felis/db/sqltypes.py CHANGED
@@ -21,9 +21,9 @@
21
21
 
22
22
  import builtins
23
23
  from collections.abc import Mapping
24
- from typing import Any
24
+ from typing import Any, Callable
25
25
 
26
- from sqlalchemy import Float, SmallInteger, types
26
+ from sqlalchemy import SmallInteger, types
27
27
  from sqlalchemy.dialects import mysql, oracle, postgresql
28
28
  from sqlalchemy.ext.compiler import compiles
29
29
 
@@ -39,27 +39,15 @@ class TINYINT(SmallInteger):
39
39
  __visit_name__ = "TINYINT"
40
40
 
41
41
 
42
- class DOUBLE(Float):
43
- """The non-standard DOUBLE type."""
44
-
45
- __visit_name__ = "DOUBLE"
46
-
47
-
48
42
  @compiles(TINYINT)
49
43
  def compile_tinyint(type_: Any, compiler: Any, **kw: Any) -> str:
50
44
  """Return type name for TINYINT."""
51
45
  return "TINYINT"
52
46
 
53
47
 
54
- @compiles(DOUBLE)
55
- def compile_double(type_: Any, compiler: Any, **kw: Any) -> str:
56
- """Return type name for double precision type."""
57
- return "DOUBLE"
58
-
59
-
60
48
  _TypeMap = Mapping[str, types.TypeEngine | type[types.TypeEngine]]
61
49
 
62
- boolean_map: _TypeMap = {MYSQL: mysql.BIT(1), ORACLE: oracle.NUMBER(1), POSTGRES: postgresql.BOOLEAN()}
50
+ boolean_map: _TypeMap = {MYSQL: mysql.BOOLEAN, ORACLE: oracle.NUMBER(1), POSTGRES: postgresql.BOOLEAN()}
63
51
 
64
52
  byte_map: _TypeMap = {
65
53
  MYSQL: mysql.TINYINT(),
@@ -160,7 +148,7 @@ def float(**kwargs: Any) -> types.TypeEngine:
160
148
 
161
149
  def double(**kwargs: Any) -> types.TypeEngine:
162
150
  """Return SQLAlchemy type for double precision float."""
163
- return _vary(DOUBLE(), double_map, kwargs)
151
+ return _vary(types.DOUBLE(), double_map, kwargs)
164
152
 
165
153
 
166
154
  def char(length: builtins.int, **kwargs: Any) -> types.TypeEngine:
@@ -178,9 +166,9 @@ def unicode(length: builtins.int, **kwargs: Any) -> types.TypeEngine:
178
166
  return _vary(types.NVARCHAR(length), unicode_map, kwargs, length)
179
167
 
180
168
 
181
- def text(length: builtins.int, **kwargs: Any) -> types.TypeEngine:
169
+ def text(**kwargs: Any) -> types.TypeEngine:
182
170
  """Return SQLAlchemy type for text."""
183
- return _vary(types.CLOB(length), text_map, kwargs, length)
171
+ return _vary(types.TEXT(), text_map, kwargs)
184
172
 
185
173
 
186
174
  def binary(length: builtins.int, **kwargs: Any) -> types.TypeEngine:
@@ -193,6 +181,13 @@ def timestamp(**kwargs: Any) -> types.TypeEngine:
193
181
  return types.TIMESTAMP()
194
182
 
195
183
 
184
+ def get_type_func(type_name: str) -> Callable:
185
+ """Return the function for the type with the given name."""
186
+ if type_name not in globals():
187
+ raise ValueError(f"Unknown type: {type_name}")
188
+ return globals()[type_name]
189
+
190
+
196
191
  def _vary(
197
192
  type_: types.TypeEngine,
198
193
  variant_map: _TypeMap,
@@ -203,7 +198,7 @@ def _vary(
203
198
  variants.update(overrides)
204
199
  for dialect, variant in variants.items():
205
200
  # If this is a class and not an instance, instantiate
206
- if isinstance(variant, type):
201
+ if callable(variant):
207
202
  variant = variant(*args)
208
203
  type_ = type_.with_variant(variant, dialect)
209
204
  return type_
felis/metadata.py CHANGED
@@ -34,7 +34,6 @@ from sqlalchemy import (
34
34
  ForeignKeyConstraint,
35
35
  Index,
36
36
  MetaData,
37
- Numeric,
38
37
  PrimaryKeyConstraint,
39
38
  ResultProxy,
40
39
  Table,
@@ -265,17 +264,12 @@ class MetaDataBuilder:
265
264
  id = column_obj.id
266
265
  description = column_obj.description
267
266
  default = column_obj.value
267
+ nullable = column_obj.nullable
268
268
 
269
- # Handle variant overrides for the column (e.g., "mysql:datatype").
269
+ # Get datatype, handling variant overrides such as "mysql:datatype".
270
270
  datatype = get_datatype_with_variants(column_obj)
271
271
 
272
- # Set default value of nullable based on column type and then whether
273
- # it was explicitly provided in the schema data.
274
- nullable = column_obj.nullable
275
- if nullable is None:
276
- nullable = False if isinstance(datatype, Numeric) else True
277
-
278
- # Set autoincrement depending on if it was provided explicitly.
272
+ # Set autoincrement, depending on if it was provided explicitly.
279
273
  autoincrement: Literal["auto"] | bool = (
280
274
  column_obj.autoincrement if column_obj.autoincrement is not None else "auto"
281
275
  )
felis/tap.py CHANGED
@@ -24,7 +24,7 @@ from __future__ import annotations
24
24
  __all__ = ["Tap11Base", "TapLoadingVisitor", "init_tables"]
25
25
 
26
26
  import logging
27
- from collections.abc import Iterable, Mapping, MutableMapping
27
+ from collections.abc import Iterable, MutableMapping
28
28
  from typing import Any
29
29
 
30
30
  from sqlalchemy import Column, Integer, String
@@ -34,14 +34,13 @@ from sqlalchemy.orm import Session, declarative_base, sessionmaker
34
34
  from sqlalchemy.schema import MetaData
35
35
  from sqlalchemy.sql.expression import Insert, insert
36
36
 
37
- from .check import FelisValidator
38
- from .types import FelisType
39
- from .visitor import Visitor
37
+ from felis import datamodel
40
38
 
41
- _Mapping = Mapping[str, Any]
39
+ from .datamodel import Constraint, Index, Schema, Table
40
+ from .types import FelisType
42
41
 
43
42
  Tap11Base: Any = declarative_base() # Any to avoid mypy mess with SA 2
44
- logger = logging.getLogger("felis")
43
+ logger = logging.getLogger(__name__)
45
44
 
46
45
  IDENTIFIER_LENGTH = 128
47
46
  SMALL_FIELD_LENGTH = 32
@@ -133,7 +132,7 @@ def init_tables(
133
132
  )
134
133
 
135
134
 
136
- class TapLoadingVisitor(Visitor[None, tuple, Tap11Base, None, tuple, None, None]):
135
+ class TapLoadingVisitor:
137
136
  """Felis schema visitor for generating TAP schema.
138
137
 
139
138
  Parameters
@@ -154,6 +153,7 @@ class TapLoadingVisitor(Visitor[None, tuple, Tap11Base, None, tuple, None, None]
154
153
  catalog_name: str | None = None,
155
154
  schema_name: str | None = None,
156
155
  tap_tables: MutableMapping[str, Any] | None = None,
156
+ tap_schema_index: int | None = None,
157
157
  ):
158
158
  self.graph_index: MutableMapping[str, Any] = {}
159
159
  self.catalog_name = catalog_name
@@ -161,7 +161,7 @@ class TapLoadingVisitor(Visitor[None, tuple, Tap11Base, None, tuple, None, None]
161
161
  self.engine = engine
162
162
  self._mock_connection: MockConnection | None = None
163
163
  self.tables = tap_tables or init_tables()
164
- self.checker = FelisValidator()
164
+ self.tap_schema_index = tap_schema_index
165
165
 
166
166
  @classmethod
167
167
  def from_mock_connection(
@@ -170,30 +170,30 @@ class TapLoadingVisitor(Visitor[None, tuple, Tap11Base, None, tuple, None, None]
170
170
  catalog_name: str | None = None,
171
171
  schema_name: str | None = None,
172
172
  tap_tables: MutableMapping[str, Any] | None = None,
173
+ tap_schema_index: int | None = None,
173
174
  ) -> TapLoadingVisitor:
174
175
  visitor = cls(engine=None, catalog_name=catalog_name, schema_name=schema_name, tap_tables=tap_tables)
175
176
  visitor._mock_connection = mock_connection
177
+ visitor.tap_schema_index = tap_schema_index
176
178
  return visitor
177
179
 
178
- def visit_schema(self, schema_obj: _Mapping) -> None:
179
- self.checker.check_schema(schema_obj)
180
- if (version_obj := schema_obj.get("version")) is not None:
181
- self.visit_schema_version(version_obj, schema_obj)
180
+ def visit_schema(self, schema_obj: Schema) -> None:
182
181
  schema = self.tables["schemas"]()
183
182
  # Override with default
184
- self.schema_name = self.schema_name or schema_obj["name"]
183
+ self.schema_name = self.schema_name or schema_obj.name
185
184
 
186
185
  schema.schema_name = self._schema_name()
187
- schema.description = schema_obj.get("description")
188
- schema.utype = schema_obj.get("votable:utype")
189
- schema.schema_index = int(schema_obj.get("tap:schema_index", 0))
186
+ schema.description = schema_obj.description
187
+ schema.utype = schema_obj.votable_utype
188
+ schema.schema_index = self.tap_schema_index
189
+ logger.debug("Set TAP_SCHEMA index: {}".format(self.tap_schema_index))
190
190
 
191
191
  if self.engine is not None:
192
192
  session: Session = sessionmaker(self.engine)()
193
193
 
194
194
  session.add(schema)
195
195
 
196
- for table_obj in schema_obj["tables"]:
196
+ for table_obj in schema_obj.tables:
197
197
  table, columns = self.visit_table(table_obj, schema_obj)
198
198
  session.add(table)
199
199
  session.add_all(columns)
@@ -202,6 +202,8 @@ class TapLoadingVisitor(Visitor[None, tuple, Tap11Base, None, tuple, None, None]
202
202
  session.add_all(keys)
203
203
  session.add_all(key_columns)
204
204
 
205
+ logger.debug("Committing TAP schema: %s", schema_obj.name)
206
+ logger.debug("TAP tables: %s", len(self.tables))
205
207
  session.commit()
206
208
  else:
207
209
  logger.info("Dry run, not inserting into database")
@@ -211,7 +213,7 @@ class TapLoadingVisitor(Visitor[None, tuple, Tap11Base, None, tuple, None, None]
211
213
  conn = self._mock_connection
212
214
  conn.execute(_insert(self.tables["schemas"], schema))
213
215
 
214
- for table_obj in schema_obj["tables"]:
216
+ for table_obj in schema_obj.tables:
215
217
  table, columns = self.visit_table(table_obj, schema_obj)
216
218
  conn.execute(_insert(self.tables["tables"], table))
217
219
  for column in columns:
@@ -223,56 +225,45 @@ class TapLoadingVisitor(Visitor[None, tuple, Tap11Base, None, tuple, None, None]
223
225
  for key_column in key_columns:
224
226
  conn.execute(_insert(self.tables["key_columns"], key_column))
225
227
 
226
- def visit_constraints(self, schema_obj: _Mapping) -> tuple:
228
+ def visit_constraints(self, schema_obj: Schema) -> tuple:
227
229
  all_keys = []
228
230
  all_key_columns = []
229
- for table_obj in schema_obj["tables"]:
230
- for c in table_obj.get("constraints", []):
231
- key, key_columns = self.visit_constraint(c, table_obj)
231
+ for table_obj in schema_obj.tables:
232
+ for c in table_obj.constraints:
233
+ key, key_columns = self.visit_constraint(c)
232
234
  if not key:
233
235
  continue
234
236
  all_keys.append(key)
235
237
  all_key_columns += key_columns
236
238
  return all_keys, all_key_columns
237
239
 
238
- def visit_schema_version(
239
- self, version_obj: str | Mapping[str, Any], schema_obj: Mapping[str, Any]
240
- ) -> None:
241
- # Docstring is inherited.
242
-
243
- # For now we ignore schema versioning completely, still do some checks.
244
- self.checker.check_schema_version(version_obj, schema_obj)
245
-
246
- def visit_table(self, table_obj: _Mapping, schema_obj: _Mapping) -> tuple:
247
- self.checker.check_table(table_obj, schema_obj)
248
- table_id = table_obj["@id"]
240
+ def visit_table(self, table_obj: Table, schema_obj: Schema) -> tuple:
241
+ table_id = table_obj.id
249
242
  table = self.tables["tables"]()
250
243
  table.schema_name = self._schema_name()
251
- table.table_name = self._table_name(table_obj["name"])
244
+ table.table_name = self._table_name(table_obj.name)
252
245
  table.table_type = "table"
253
- table.utype = table_obj.get("votable:utype")
254
- table.description = table_obj.get("description")
255
- table.table_index = int(table_obj.get("tap:table_index", 0))
246
+ table.utype = table_obj.votable_utype
247
+ table.description = table_obj.description
248
+ table.table_index = 0 if table_obj.tap_table_index is None else table_obj.tap_table_index
256
249
 
257
- columns = [self.visit_column(c, table_obj) for c in table_obj["columns"]]
258
- self.visit_primary_key(table_obj.get("primaryKey", []), table_obj)
250
+ columns = [self.visit_column(c, table_obj) for c in table_obj.columns]
251
+ self.visit_primary_key(table_obj.primary_key, table_obj)
259
252
 
260
- for i in table_obj.get("indexes", []):
253
+ for i in table_obj.indexes:
261
254
  self.visit_index(i, table)
262
255
 
263
256
  self.graph_index[table_id] = table
264
257
  return table, columns
265
258
 
266
- def check_column(self, column_obj: _Mapping, table_obj: _Mapping) -> None:
267
- self.checker.check_column(column_obj, table_obj)
268
- _id = column_obj["@id"]
269
- # Guaranteed to exist at this point, for mypy use "" as default
270
- datatype_name = column_obj.get("datatype", "")
271
- felis_type = FelisType.felis_type(datatype_name)
259
+ def check_column(self, column_obj: datamodel.Column) -> None:
260
+ _id = column_obj.id
261
+ datatype_name = column_obj.datatype
262
+ felis_type = FelisType.felis_type(datatype_name.value)
272
263
  if felis_type.is_sized:
273
264
  # It is expected that both arraysize and length are fine for
274
265
  # length types.
275
- arraysize = column_obj.get("votable:arraysize", column_obj.get("length"))
266
+ arraysize = column_obj.votable_arraysize or column_obj.length
276
267
  if arraysize is None:
277
268
  logger.warning(
278
269
  f"votable:arraysize and length for {_id} are None for type {datatype_name}. "
@@ -283,7 +274,7 @@ class TapLoadingVisitor(Visitor[None, tuple, Tap11Base, None, tuple, None, None]
283
274
  # datetime types really should have a votable:arraysize, because
284
275
  # they are converted to strings and the `length` is loosely to the
285
276
  # string size
286
- if "votable:arraysize" not in column_obj:
277
+ if not column_obj.votable_arraysize:
287
278
  logger.warning(
288
279
  f"votable:arraysize for {_id} is None for type {datatype_name}. "
289
280
  f'Using length "*". '
@@ -291,47 +282,45 @@ class TapLoadingVisitor(Visitor[None, tuple, Tap11Base, None, tuple, None, None]
291
282
  "materialized datetime/timestamp strings."
292
283
  )
293
284
 
294
- def visit_column(self, column_obj: _Mapping, table_obj: _Mapping) -> Tap11Base:
295
- self.check_column(column_obj, table_obj)
296
- column_id = column_obj["@id"]
297
- table_name = self._table_name(table_obj["name"])
285
+ def visit_column(self, column_obj: datamodel.Column, table_obj: Table) -> Tap11Base:
286
+ self.check_column(column_obj)
287
+ column_id = column_obj.id
288
+ table_name = self._table_name(table_obj.name)
298
289
 
299
290
  column = self.tables["columns"]()
300
291
  column.table_name = table_name
301
- column.column_name = column_obj["name"]
292
+ column.column_name = column_obj.name
302
293
 
303
- felis_datatype = column_obj["datatype"]
304
- felis_type = FelisType.felis_type(felis_datatype)
305
- column.datatype = column_obj.get("votable:datatype", felis_type.votable_name)
294
+ felis_datatype = column_obj.datatype
295
+ felis_type = FelisType.felis_type(felis_datatype.value)
296
+ column.datatype = column_obj.votable_datatype or felis_type.votable_name
306
297
 
307
298
  arraysize = None
308
299
  if felis_type.is_sized:
309
- # prefer votable:arraysize to length, fall back to `*`
310
- arraysize = column_obj.get("votable:arraysize", column_obj.get("length", "*"))
300
+ arraysize = column_obj.votable_arraysize or column_obj.length or "*"
311
301
  if felis_type.is_timestamp:
312
- arraysize = column_obj.get("votable:arraysize", "*")
302
+ arraysize = column_obj.votable_arraysize or "*"
313
303
  column.arraysize = arraysize
314
304
 
315
- column.xtype = column_obj.get("votable:xtype")
316
- column.description = column_obj.get("description")
317
- column.utype = column_obj.get("votable:utype")
305
+ column.xtype = column_obj.votable_xtype
306
+ column.description = column_obj.description
307
+ column.utype = column_obj.votable_utype
318
308
 
319
- unit = column_obj.get("ivoa:unit") or column_obj.get("fits:tunit")
309
+ unit = column_obj.ivoa_unit or column_obj.fits_tunit
320
310
  column.unit = unit
321
- column.ucd = column_obj.get("ivoa:ucd")
311
+ column.ucd = column_obj.ivoa_ucd
322
312
 
323
313
  # We modify this after we process columns
324
314
  column.indexed = 0
325
315
 
326
- column.principal = column_obj.get("tap:principal", 0)
327
- column.std = column_obj.get("tap:std", 0)
328
- column.column_index = column_obj.get("tap:column_index")
316
+ column.principal = column_obj.tap_principal
317
+ column.std = column_obj.tap_std
318
+ column.column_index = column_obj.tap_column_index
329
319
 
330
320
  self.graph_index[column_id] = column
331
321
  return column
332
322
 
333
- def visit_primary_key(self, primary_key_obj: str | Iterable[str], table_obj: _Mapping) -> None:
334
- self.checker.check_primary_key(primary_key_obj, table_obj)
323
+ def visit_primary_key(self, primary_key_obj: str | Iterable[str] | None, table_obj: Table) -> None:
335
324
  if primary_key_obj:
336
325
  if isinstance(primary_key_obj, str):
337
326
  primary_key_obj = [primary_key_obj]
@@ -341,19 +330,18 @@ class TapLoadingVisitor(Visitor[None, tuple, Tap11Base, None, tuple, None, None]
341
330
  columns[0].indexed = 1
342
331
  return None
343
332
 
344
- def visit_constraint(self, constraint_obj: _Mapping, table_obj: _Mapping) -> tuple:
345
- self.checker.check_constraint(constraint_obj, table_obj)
346
- constraint_type = constraint_obj["@type"]
333
+ def visit_constraint(self, constraint_obj: Constraint) -> tuple:
334
+ constraint_type = constraint_obj.type
347
335
  key = None
348
336
  key_columns = []
349
337
  if constraint_type == "ForeignKey":
350
- constraint_name = constraint_obj["name"]
351
- description = constraint_obj.get("description")
352
- utype = constraint_obj.get("votable:utype")
338
+ constraint_name = constraint_obj.name
339
+ description = constraint_obj.description
340
+ utype = constraint_obj.votable_utype
353
341
 
354
- columns = [self.graph_index[col["@id"]] for col in constraint_obj.get("columns", [])]
342
+ columns = [self.graph_index[col_id] for col_id in getattr(constraint_obj, "columns", [])]
355
343
  refcolumns = [
356
- self.graph_index[refcol["@id"]] for refcol in constraint_obj.get("referencedColumns", [])
344
+ self.graph_index[refcol_id] for refcol_id in getattr(constraint_obj, "referenced_columns", [])
357
345
  ]
358
346
 
359
347
  table_name = None
@@ -386,9 +374,8 @@ class TapLoadingVisitor(Visitor[None, tuple, Tap11Base, None, tuple, None, None]
386
374
  key_columns.append(key_column)
387
375
  return key, key_columns
388
376
 
389
- def visit_index(self, index_obj: _Mapping, table_obj: _Mapping) -> None:
390
- self.checker.check_index(index_obj, table_obj)
391
- columns = [self.graph_index[col["@id"]] for col in index_obj.get("columns", [])]
377
+ def visit_index(self, index_obj: Index, table_obj: Table) -> None:
378
+ columns = [self.graph_index[col_id] for col_id in getattr(index_obj, "columns", [])]
392
379
  # if just one column and it's indexed, update the object
393
380
  if len(columns) == 1:
394
381
  columns[0].indexed = 1
felis/types.py CHANGED
@@ -125,7 +125,7 @@ class Unicode(FelisType, felis_name="unicode", votable_name="unicodeChar", is_si
125
125
  """Felis definition of unicode string type."""
126
126
 
127
127
 
128
- class Text(FelisType, felis_name="text", votable_name="unicodeChar", is_sized=True):
128
+ class Text(FelisType, felis_name="text", votable_name="char"):
129
129
  """Felis definition of text type."""
130
130
 
131
131
 
felis/version.py CHANGED
@@ -1,2 +1,2 @@
1
1
  __all__ = ["__version__"]
2
- __version__ = "26.2024.1500"
2
+ __version__ = "26.2024.1700"
@@ -1,10 +1,11 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: lsst-felis
3
- Version: 26.2024.1500
3
+ Version: 26.2024.1700
4
4
  Summary: A vocabulary for describing catalogs and acting on those descriptions
5
5
  Author-email: Rubin Observatory Data Management <dm-admin@lists.lsst.org>
6
6
  License: GNU General Public License v3 or later (GPLv3+)
7
- Project-URL: Homepage, https://github.com/lsst/felis
7
+ Project-URL: Homepage, https://felis.lsst.io
8
+ Project-URL: Source, https://github.com/lsst/felis
8
9
  Keywords: lsst
9
10
  Classifier: Intended Audience :: Science/Research
10
11
  Classifier: License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)
@@ -24,6 +25,8 @@ Requires-Dist: pyyaml >=6
24
25
  Requires-Dist: pyld >=2
25
26
  Requires-Dist: pydantic <3,>=2
26
27
  Requires-Dist: lsst-utils
28
+ Provides-Extra: dev
29
+ Requires-Dist: documenteer[guide] ; extra == 'dev'
27
30
  Provides-Extra: test
28
31
  Requires-Dist: pytest >=3.2 ; extra == 'test'
29
32
 
@@ -0,0 +1,24 @@
1
+ felis/__init__.py,sha256=_Pw-QKMYj0WRgE8fW2N2pBXJUj-Pjv8dSKJBzykjyZU,1842
2
+ felis/check.py,sha256=RBxXq7XwPGIucrs1PPgPtgk8MrWAJlOmoxCNySEz9-I,13892
3
+ felis/cli.py,sha256=VBlEoo65Y5lPf7wzokuZeW1HL-ObfIrrVzo155RZK0Q,16202
4
+ felis/datamodel.py,sha256=VEymalSBkVmTPWL-xm1DPp6fAR2ze0KvFRGwVf2YR08,19315
5
+ felis/metadata.py,sha256=gcBjpB_JpEfKXpKc7hMwG11PizqeDR8IJaFfhFtBIEw,18294
6
+ felis/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
7
+ felis/simple.py,sha256=yzv_aoZrZhfakd1Xm7gLDeVKyJjCDZ7wAyYYp-l_Sxs,14414
8
+ felis/tap.py,sha256=MOqe9_K6KK9oUqEuKyKtSLFrzTluohbcvQ59DeoRiLY,16803
9
+ felis/types.py,sha256=z_ECfSxpqiFSGppjxKwCO4fPP7TLBaIN3Qo1AGF16Go,4418
10
+ felis/utils.py,sha256=tYxr0xFdPN4gDHibeAD9d5DFgU8hKlSZVKmZoDzi4e8,4164
11
+ felis/validation.py,sha256=f9VKvp7q-cnim2D5voTKwCdt0NRsYBpTwom1Z_3OKkc,3469
12
+ felis/version.py,sha256=PH-FIbvh4hvlcfNHl8Bt-EVFDlE5_dasGcUm02k5uxI,55
13
+ felis/visitor.py,sha256=EazU4nYbkKBj3mCZYvsTCBTNmh0qRaUNZIzCcM3dqOQ,6439
14
+ felis/db/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
15
+ felis/db/_variants.py,sha256=zCuXDgU_x_pTZcWkBLgqQCiOhlA6y2tBt-PUQfafwmM,3368
16
+ felis/db/sqltypes.py,sha256=n6E1K-Hvdt62GVBQedefzTqBpNS7ks92lchRM5BF_Oo,5735
17
+ lsst_felis-26.2024.1700.dist-info/COPYRIGHT,sha256=bUmNy19uUxqITMpjeHFe69q3IzQpjxvvBw6oV7kR7ho,129
18
+ lsst_felis-26.2024.1700.dist-info/LICENSE,sha256=jOtLnuWt7d5Hsx6XXB2QxzrSe2sWWh3NgMfFRetluQM,35147
19
+ lsst_felis-26.2024.1700.dist-info/METADATA,sha256=e4j5d28w8iR-ZzYbhvD_tSKHAIC7Gx07rSW4llYDaa8,1215
20
+ lsst_felis-26.2024.1700.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
21
+ lsst_felis-26.2024.1700.dist-info/entry_points.txt,sha256=Gk2XFujA_Gp52VBk45g5kim8TDoMDJFPctsMqiq72EM,40
22
+ lsst_felis-26.2024.1700.dist-info/top_level.txt,sha256=F4SvPip3iZRVyISi50CHhwTIAokAhSxjWiVcn4IVWRI,6
23
+ lsst_felis-26.2024.1700.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
24
+ lsst_felis-26.2024.1700.dist-info/RECORD,,
@@ -1,24 +0,0 @@
1
- felis/__init__.py,sha256=_Pw-QKMYj0WRgE8fW2N2pBXJUj-Pjv8dSKJBzykjyZU,1842
2
- felis/check.py,sha256=RBxXq7XwPGIucrs1PPgPtgk8MrWAJlOmoxCNySEz9-I,13892
3
- felis/cli.py,sha256=YeGSiA3ywPVMMdB1YxH1_Gdac1kl4oPJvJtajfCs5VU,16637
4
- felis/datamodel.py,sha256=ooNSg68OuNk89EVu1MtxupLUWgSyzmb50wjza1joDO4,16002
5
- felis/metadata.py,sha256=5DE2YMnu6YuhwntBSe-OheCD7C2-vA4yb64BpjTC68A,18542
6
- felis/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
7
- felis/simple.py,sha256=yzv_aoZrZhfakd1Xm7gLDeVKyJjCDZ7wAyYYp-l_Sxs,14414
8
- felis/tap.py,sha256=RBwEKyU3S0oXSNIMoI2WRAuC9WB5eai9BdQQUYN5Qdc,17704
9
- felis/types.py,sha256=1GL6IkHcIsIydiyw1eX98REh-lWCVIRO-9qjmaZfqvw,4440
10
- felis/utils.py,sha256=tYxr0xFdPN4gDHibeAD9d5DFgU8hKlSZVKmZoDzi4e8,4164
11
- felis/validation.py,sha256=f9VKvp7q-cnim2D5voTKwCdt0NRsYBpTwom1Z_3OKkc,3469
12
- felis/version.py,sha256=v0ZRn4V9ZMSKzM31IPNIMtcYAuVRbtzceeMS49b6RIE,55
13
- felis/visitor.py,sha256=EazU4nYbkKBj3mCZYvsTCBTNmh0qRaUNZIzCcM3dqOQ,6439
14
- felis/db/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
15
- felis/db/_variants.py,sha256=aW0Q7R4KEtxLR7VMashQjDLWdzDNrMVAH521MSvMey0,3346
16
- felis/db/sqltypes.py,sha256=0HOEqvL0OailGP-j6Jj5tnOSu_Pt7Hi29PPof4Q5d2c,5787
17
- lsst_felis-26.2024.1500.dist-info/COPYRIGHT,sha256=bUmNy19uUxqITMpjeHFe69q3IzQpjxvvBw6oV7kR7ho,129
18
- lsst_felis-26.2024.1500.dist-info/LICENSE,sha256=jOtLnuWt7d5Hsx6XXB2QxzrSe2sWWh3NgMfFRetluQM,35147
19
- lsst_felis-26.2024.1500.dist-info/METADATA,sha256=o3cHiTSh5w6RHPvPAIG1sXMQZ6t1xW4lkNrgbMzMVlk,1101
20
- lsst_felis-26.2024.1500.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
21
- lsst_felis-26.2024.1500.dist-info/entry_points.txt,sha256=Gk2XFujA_Gp52VBk45g5kim8TDoMDJFPctsMqiq72EM,40
22
- lsst_felis-26.2024.1500.dist-info/top_level.txt,sha256=F4SvPip3iZRVyISi50CHhwTIAokAhSxjWiVcn4IVWRI,6
23
- lsst_felis-26.2024.1500.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
24
- lsst_felis-26.2024.1500.dist-info/RECORD,,