linkml 1.6.4__py3-none-any.whl → 1.6.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
1
1
  import logging
2
- import os
3
- from dataclasses import dataclass
2
+ from dataclasses import dataclass, field
3
+ from pathlib import Path
4
4
  from typing import List
5
5
 
6
6
  import click
@@ -8,78 +8,80 @@ from linkml_runtime.utils.schemaview import SchemaView
8
8
  from openpyxl import Workbook
9
9
  from openpyxl.utils import get_column_letter
10
10
  from openpyxl.worksheet.datavalidation import DataValidation
11
- from openpyxl.worksheet.worksheet import Worksheet
12
11
 
13
12
  from linkml._version import __version__
14
13
  from linkml.utils.generator import Generator, shared_arguments
15
- from linkml.utils.helpers import convert_to_snake_case
16
14
 
17
15
 
18
16
  @dataclass
19
17
  class ExcelGenerator(Generator):
20
18
  # ClassVars
21
- generatorname = os.path.basename(__file__)
19
+ generatorname = Path(__file__).name
22
20
  generatorversion = "0.1.1"
23
21
  valid_formats = ["xlsx"]
24
22
  uses_schemaloader = False
25
23
  requires_metamodel = False
26
24
 
25
+ split_workbook_by_class: bool = field(default_factory=lambda: False)
26
+
27
27
  def __post_init__(self) -> None:
28
28
  super().__post_init__()
29
29
  self.logger = logging.getLogger(__name__)
30
30
  self.schemaview = SchemaView(self.schema)
31
31
 
32
- def create_workbook(self, workbook_name: str) -> Workbook:
32
+ def create_workbook(self, workbook_path: Path) -> Workbook:
33
33
  """
34
34
  Creates an Excel workbook using the openpyxl library and returns it.
35
35
 
36
- :param workbook_name: Name of the workbook to be created.
36
+ :param workbook_path: Path of the workbook to be created.
37
37
  :return: An openpyxl Workbook object representing the newly created workbook.
38
38
  """
39
39
  workbook = Workbook()
40
- workbook.title = workbook_name
40
+ workbook.save(workbook_path)
41
41
  return workbook
42
42
 
43
- def get_workbook_name(self, workbook: Workbook) -> str:
44
- """
45
- Returns the name of the given workbook.
46
-
47
- :param workbook: The workbook whose name should be returned.
48
- :return: Name of the workbook.
49
- """
50
- return workbook.title
51
-
52
- def remove_worksheet_by_name(self, workbook: Workbook, worksheet_name: str) -> None:
53
- """
54
- Remove worksheet from workbook by name.
55
- """
56
- worksheet = workbook[worksheet_name]
57
- workbook.remove(worksheet)
58
-
59
- def create_worksheet(self, workbook: Workbook, worksheet_name: str) -> Worksheet:
43
+ def create_workbook_and_worksheets(self, output_path: Path, classes: List[str]) -> None:
60
44
  """
61
- Creates an Excel worksheet with the given name in the given workbook.
45
+ Creates a workbook with worksheets for each class.
62
46
 
63
- :param workbook: The workbook to which the worksheet should be added.
64
- :param worksheet_name: Name of the worksheet to be created.
47
+ :param output_path: The path where the workbook should be created.
48
+ :param classes: List of class names for which worksheets should be created.
65
49
  """
66
- worksheet = workbook.create_sheet(worksheet_name)
67
- workbook_name = self.get_workbook_name(workbook)
68
- workbook.save(workbook_name)
50
+ workbook = self.create_workbook(output_path)
51
+ workbook.remove(workbook.active)
52
+ sv = self.schemaview
69
53
 
70
- return worksheet
54
+ for cls_name in classes:
55
+ cls = sv.get_class(class_name=cls_name, imports=self.mergeimports)
56
+ if not cls.mixin and not cls.abstract:
57
+ workbook.create_sheet(cls_name)
71
58
 
72
- def create_schema_worksheets(self, workbook: str) -> None:
73
- """
74
- Creates worksheets in a given Excel workbook based on the classes in the
75
- schema.
59
+ # Add columns to the worksheet for the current class
60
+ slots = [s.name for s in sv.class_induced_slots(cls_name, self.mergeimports)]
61
+ self.add_columns_to_worksheet(workbook, cls_name, slots)
62
+ workbook.save(output_path)
76
63
 
77
- :param workbook: The workbook to which the worksheet should be added.
78
- """
79
- sv = self.schemaview
80
- for cls_name, cls in sv.all_classes(imports=self.mergeimports).items():
81
- if not cls.mixin and not cls.abstract:
82
- self.create_worksheet(workbook, cls_name)
64
+ # Add enum validation for columns with enum types
65
+ enum_list = list(sv.all_enums(imports=self.mergeimports).keys())
66
+ for s in sv.class_induced_slots(cls_name, self.mergeimports):
67
+ if s.range in enum_list:
68
+ pv_list = list(sv.get_enum(s.range).permissible_values.keys())
69
+
70
+ # Check if the total length of permissible values is <= 255 characters
71
+ enum_length = sum(len(value) for value in pv_list)
72
+ if enum_length <= 255:
73
+ self.column_enum_validation(workbook, cls_name, s.name, pv_list)
74
+ else:
75
+ self.logger.warning(
76
+ f"'{s.range}' has permissible values with total "
77
+ "length > 255 characters. Dropdowns may not work properly "
78
+ f"in {output_path}"
79
+ )
80
+ workbook.save(output_path)
81
+
82
+ workbook.save(output_path)
83
+ if self.split_workbook_by_class:
84
+ self.logger.info(f"The Excel workbooks have been written to {output_path}")
83
85
 
84
86
  def add_columns_to_worksheet(self, workbook: Workbook, worksheet_name: str, sheet_headings: List[str]) -> None:
85
87
  """
@@ -96,10 +98,6 @@ class ExcelGenerator(Generator):
96
98
  for i, heading in enumerate(sheet_headings):
97
99
  worksheet.cell(row=1, column=i + 1, value=heading)
98
100
 
99
- # Save the changes to the workbook
100
- workbook_name = self.get_workbook_name(workbook)
101
- workbook.save(workbook_name)
102
-
103
101
  def column_enum_validation(
104
102
  self,
105
103
  workbook: Workbook,
@@ -129,48 +127,52 @@ class ExcelGenerator(Generator):
129
127
 
130
128
  dv.add(f"{column_letter}2:{column_letter}1048576")
131
129
 
132
- workbook_name = self.get_workbook_name(workbook)
133
- workbook.save(workbook_name)
134
-
135
130
  def serialize(self, **kwargs) -> str:
136
- self.output = (
137
- os.path.abspath(convert_to_snake_case(self.schema.name) + ".xlsx") if not self.output else self.output
138
- )
131
+ sv = self.schemaview
132
+ classes_to_process = [
133
+ cls_name
134
+ for cls_name, cls in sv.all_classes(imports=self.mergeimports).items()
135
+ if not cls.mixin and not cls.abstract
136
+ ]
139
137
 
140
- workbook = self.create_workbook(self.output)
141
- self.remove_worksheet_by_name(workbook, "Sheet")
142
- self.create_schema_worksheets(workbook)
138
+ if self.split_workbook_by_class:
139
+ output_path = Path(self.schema.name + "_worksheets") if not self.output else Path(self.output)
140
+ output_path = output_path.absolute()
143
141
 
144
- sv = self.schemaview
145
- for cls_name, cls in sv.all_classes(imports=self.mergeimports).items():
146
- if not cls.mixin and not cls.abstract:
147
- slots = [s.name for s in sv.class_induced_slots(cls_name, imports=self.mergeimports)]
148
- self.add_columns_to_worksheet(workbook, cls_name, slots)
142
+ if not output_path.is_dir():
143
+ output_path.mkdir(parents=True, exist_ok=True)
149
144
 
150
- enum_list = [e_name for e_name, _ in sv.all_enums(imports=self.mergeimports).items()]
151
- for cls_name, cls in sv.all_classes(imports=self.mergeimports).items():
152
- if not cls.mixin and not cls.abstract:
153
- for s in sv.class_induced_slots(cls_name, imports=self.mergeimports):
154
- if s.range in enum_list:
155
- pv_list = []
156
- for pv_name, _ in sv.get_enum(s.range).permissible_values.items():
157
- pv_list.append(pv_name)
158
- self.column_enum_validation(workbook, cls_name, s.name, pv_list)
159
- self.logger.info(f"The Excel workbook has been written to {self.output}")
145
+ for cls_name in classes_to_process:
146
+ cls_output_path = output_path.joinpath(f"{cls_name}.xlsx")
147
+ self.create_workbook_and_worksheets(cls_output_path, [cls_name])
148
+ self.logger.info(f"The Excel workbook for class '{cls_name}' has been written to {cls_output_path}")
149
+ else:
150
+ output_path = Path(self.schema.name + ".xlsx") if not self.output else Path(self.output)
151
+ output_path = output_path.absolute()
152
+
153
+ self.create_workbook_and_worksheets(output_path, classes_to_process)
154
+
155
+ self.logger.info(f"The Excel workbook has been written to {output_path}")
160
156
 
161
157
 
162
158
  @shared_arguments(ExcelGenerator)
163
159
  @click.command()
160
+ @click.option(
161
+ "--split-workbook-by-class",
162
+ is_flag=True,
163
+ default=False,
164
+ help="""Split model into separate Excel workbooks/files, one for each class""",
165
+ )
164
166
  @click.option(
165
167
  "-o",
166
168
  "--output",
167
169
  type=click.Path(),
168
- help="""Name of Excel spreadsheet to be created""",
170
+ help="""Name of Excel spreadsheet to be created, or name of directory to create split workbooks in""",
169
171
  )
170
172
  @click.version_option(__version__, "-V", "--version")
171
- def cli(yamlfile, **kwargs):
173
+ def cli(yamlfile, split_workbook_by_class, **kwargs):
172
174
  """Generate Excel representation of a LinkML model"""
173
- ExcelGenerator(yamlfile, **kwargs).serialize(**kwargs)
175
+ ExcelGenerator(yamlfile, split_workbook_by_class=split_workbook_by_class, **kwargs).serialize(**kwargs)
174
176
 
175
177
 
176
178
  if __name__ == "__main__":
@@ -2,6 +2,7 @@
2
2
  import logging
3
3
  import os
4
4
  from collections import defaultdict
5
+ from copy import copy
5
6
  from dataclasses import dataclass, field
6
7
  from enum import Enum, unique
7
8
  from typing import Any, List, Mapping, Optional, Set, Tuple, Union
@@ -403,6 +404,48 @@ class OwlSchemaGenerator(Generator):
403
404
  else:
404
405
  self.graph.add((subject_expr, RDFS.subClassOf, superclass_expr))
405
406
 
407
+ def get_own_slots(self, cls: Union[ClassDefinition, AnonymousClassExpression]) -> List[SlotDefinition]:
408
+ """
409
+ Get the slots that are defined on a class, excluding those that are inherited.
410
+
411
+ :param cls:
412
+ :return:
413
+ """
414
+ sv = self.schemaview
415
+ if isinstance(cls, ClassDefinition):
416
+ own_slots = (
417
+ list(cls.slot_usage.values()) + list(cls.attributes.values()) + list(cls.slot_conditions.values())
418
+ )
419
+ for slot_name in cls.slots:
420
+ # if slot_name not in cls.slot_usage:
421
+ slot = sv.get_slot(slot_name)
422
+ if slot:
423
+ own_slots.append(slot)
424
+ else:
425
+ logging.warning(f"Unknown top-level slot {slot_name}")
426
+ else:
427
+ own_slots = []
428
+ own_slots.extend(cls.slot_conditions.values())
429
+ # merge slots with the same name
430
+ slot_map = {}
431
+ for slot in own_slots:
432
+ if slot.name in slot_map:
433
+ for k, v in slot.__dict__.items():
434
+ curr = slot_map[slot.name].get(k, None)
435
+ # print(f"MERGE={slot.name}.{k} = {v} // CURR={curr}")
436
+ if v and not curr:
437
+ slot_map[slot.name][k] = v
438
+ # print(f"OVERRIDE={slot.name}, k={k}, v={v}")
439
+ else:
440
+ slot_map[slot.name] = copy(slot.__dict__)
441
+ # print(f"INIT={slot.name}, vals={slot_map[slot.name]}")
442
+
443
+ # print(f"SN={slot.name}, vals={slot_map[slot.name]}")
444
+ own_slots = [SlotDefinition(**v) for v in slot_map.values()]
445
+ # sort by name
446
+ own_slots.sort(key=lambda x: x.name)
447
+ return own_slots
448
+
406
449
  def transform_class_expression(
407
450
  self,
408
451
  cls: Union[ClassDefinition, AnonymousClassExpression],
@@ -420,20 +463,7 @@ class OwlSchemaGenerator(Generator):
420
463
  """
421
464
  graph = self.graph
422
465
  sv = self.schemaview
423
- if isinstance(cls, ClassDefinition):
424
- own_slots = (
425
- list(cls.slot_usage.values()) + list(cls.attributes.values()) + list(cls.slot_conditions.values())
426
- )
427
- for slot_name in cls.slots:
428
- if slot_name not in cls.slot_usage:
429
- slot = sv.get_slot(slot_name)
430
- if slot:
431
- own_slots.append(slot)
432
- else:
433
- own_slots = []
434
- own_slots.extend(cls.slot_conditions.values())
435
- # sort by name
436
- own_slots.sort(key=lambda x: x.name)
466
+ own_slots = self.get_own_slots(cls)
437
467
  owl_exprs = []
438
468
  if cls.any_of:
439
469
  owl_exprs.append(self._union_of([self.transform_class_expression(x) for x in cls.any_of]))
@@ -23,6 +23,7 @@ from linkml.generators.pythongen import PythonGenerator
23
23
  from linkml.generators.shaclgen import ShaclGenerator
24
24
  from linkml.generators.shexgen import ShExGenerator
25
25
  from linkml.generators.sqlddlgen import SQLDDLGenerator
26
+ from linkml.utils.cli_utils import log_level_option
26
27
  from linkml.utils.generator import Generator
27
28
 
28
29
  PATH_FSTRING = str
@@ -103,7 +104,7 @@ class ProjectGenerator:
103
104
  all_schemas = [schema_path]
104
105
  else:
105
106
  all_schemas = get_local_imports(schema_path, os.path.dirname(schema_path))
106
- print(f"ALL_SCHEMAS = {all_schemas}")
107
+ logging.debug(f"ALL_SCHEMAS = {all_schemas}")
107
108
  for gen_name, (gen_cls, gen_path_fmt, default_gen_args) in GEN_MAP.items():
108
109
  if config.includes is not None and config.includes != [] and gen_name not in config.includes:
109
110
  logging.info(f"Skipping {gen_name} as not in inclusion list: {config.includes}")
@@ -178,6 +179,7 @@ class ProjectGenerator:
178
179
  show_default=True,
179
180
  help="Merge imports into source file",
180
181
  )
182
+ @log_level_option
181
183
  @click.argument("yamlfile")
182
184
  @click.version_option(__version__, "-V", "--version")
183
185
  def cli(
@@ -221,7 +223,6 @@ def cli(
221
223
  top_class: Container
222
224
 
223
225
  """
224
- logging.basicConfig(level=logging.INFO)
225
226
  project_config = ProjectConfiguration()
226
227
  if config_file is not None:
227
228
  for k, v in yaml.safe_load(config_file).items():
@@ -44,14 +44,14 @@ from __future__ import annotations
44
44
  from datetime import datetime, date
45
45
  from enum import Enum
46
46
  from typing import List, Dict, Optional, Any, Union"""
47
- if pydantic_ver == 1:
47
+ if pydantic_ver == "1":
48
48
  template += """
49
- from pydantic import BaseModel as BaseModel, Field"""
50
- else:
49
+ from pydantic import BaseModel as BaseModel, Field, validator"""
50
+ elif pydantic_ver == "2":
51
51
  template += """
52
- from pydantic import BaseModel as BaseModel, ConfigDict, Field"""
53
-
52
+ from pydantic import BaseModel as BaseModel, ConfigDict, Field, field_validator"""
54
53
  template += """
54
+ import re
55
55
  import sys
56
56
  if sys.version_info >= (3, 8):
57
57
  from typing import Literal
@@ -108,7 +108,8 @@ class {{ e.name }}(str{% if e['values'] %}, Enum{% endif %}):
108
108
  {% endfor %}
109
109
  """
110
110
  ### CLASSES ###
111
- template += """
111
+ if pydantic_ver == "1":
112
+ template += """
112
113
  {%- for c in schema.classes.values() %}
113
114
  class {{ c.name }}
114
115
  {%- if class_isa_plus_mixins[c.name] -%}
@@ -142,8 +143,76 @@ class {{ c.name }}
142
143
  {% else -%}
143
144
  None
144
145
  {% endfor %}
146
+ {% for attr in c.attributes.values() if c.attributes -%}
147
+ {%- if attr.pattern %}
148
+ @validator('{{attr.name}}', allow_reuse=True)
149
+ def pattern_{{attr.name}}(cls, v):
150
+ pattern=re.compile(r"{{attr.pattern}}")
151
+ if isinstance(v,list):
152
+ for element in v:
153
+ if not pattern.match(element):
154
+ raise ValueError(f"Invalid {{attr.name}} format: {element}")
155
+ elif isinstance(v,str):
156
+ if not pattern.match(v):
157
+ raise ValueError(f"Invalid {{attr.name}} format: {v}")
158
+ return v
159
+ {% endif -%}
160
+ {% endfor %}
145
161
  {% endfor %}
146
162
  """
163
+ elif pydantic_ver == "2":
164
+ template += """
165
+ {%- for c in schema.classes.values() %}
166
+ class {{ c.name }}
167
+ {%- if class_isa_plus_mixins[c.name] -%}
168
+ ({{class_isa_plus_mixins[c.name]|join(', ')}})
169
+ {%- else -%}
170
+ (ConfiguredBaseModel)
171
+ {%- endif -%}
172
+ :
173
+ {% if c.description -%}
174
+ \"\"\"
175
+ {{ c.description }}
176
+ \"\"\"
177
+ {%- endif %}
178
+ {% for attr in c.attributes.values() if c.attributes -%}
179
+ {{attr.name}}: {{ attr.annotations['python_range'].value }} = Field(
180
+ {%- if predefined_slot_values[c.name][attr.name] -%}
181
+ {{ predefined_slot_values[c.name][attr.name] }}
182
+ {%- elif (attr.required or attr.identifier or attr.key) -%}
183
+ ...
184
+ {%- else -%}
185
+ None
186
+ {%- endif -%}
187
+ {%- if attr.title != None %}, title="{{attr.title}}"{% endif -%}
188
+ {%- if attr.description %}, description=\"\"\"{{attr.description}}\"\"\"{% endif -%}
189
+ {%- if attr.equals_number != None %}, le={{attr.equals_number}}, ge={{attr.equals_number}}
190
+ {%- else -%}
191
+ {%- if attr.minimum_value != None %}, ge={{attr.minimum_value}}{% endif -%}
192
+ {%- if attr.maximum_value != None %}, le={{attr.maximum_value}}{% endif -%}
193
+ {%- endif -%}
194
+ )
195
+ {% else -%}
196
+ None
197
+ {% endfor %}
198
+ {% for attr in c.attributes.values() if c.attributes -%}
199
+ {%- if attr.pattern %}
200
+ @field_validator('{{attr.name}}')
201
+ def pattern_{{attr.name}}(cls, v):
202
+ pattern=re.compile(r"{{attr.pattern}}")
203
+ if isinstance(v,list):
204
+ for element in v:
205
+ if not pattern.match(element):
206
+ raise ValueError(f"Invalid {{attr.name}} format: {element}")
207
+ elif isinstance(v,str):
208
+ if not pattern.match(v):
209
+ raise ValueError(f"Invalid {{attr.name}} format: {v}")
210
+ return v
211
+ {% endif -%}
212
+ {% endfor %}
213
+ {% endfor %}
214
+ """
215
+
147
216
  ### FWD REFS / REBUILD MODEL ###
148
217
  if pydantic_ver == "1":
149
218
  template += """
@@ -509,6 +509,7 @@ dataclasses._init_fn = dataclasses_init_fn_with_kwargs
509
509
  initializers += [self.gen_class_variable(cls, slot, False) for slot in slot_variables]
510
510
 
511
511
  # Followed by everything else
512
+
512
513
  slot_variables = self._slot_iter(cls, lambda slot: not slot.required and slot in domain_slots)
513
514
  initializers += [self.gen_class_variable(cls, slot, False) for slot in slot_variables]
514
515
 
@@ -604,7 +605,7 @@ dataclasses._init_fn = dataclasses_init_fn_with_kwargs
604
605
 
605
606
  def class_reference_type(self, slot: SlotDefinition, cls: Optional[ClassDefinition]) -> Tuple[str, str, str]:
606
607
  """
607
- Return the type of a slot referencing a class
608
+ Return the type of slot referencing a class
608
609
 
609
610
  :param slot: slot to be typed
610
611
  :param cls: owning class. Used for generating key references
@@ -734,6 +735,12 @@ dataclasses._init_fn = dataclasses_init_fn_with_kwargs
734
735
  return typ_name
735
736
 
736
737
  def gen_constructor(self, cls: ClassDefinition) -> Optional[str]:
738
+ """
739
+ Generate python constructor for class
740
+
741
+ :param cls: class to generate constructor for
742
+ :return: python constructor
743
+ """
737
744
  rlines: List[str] = []
738
745
  designators = [x for x in self.domain_slots(cls) if x.designates_type]
739
746
  if len(designators) > 0:
@@ -795,6 +802,11 @@ dataclasses._init_fn = dataclasses_init_fn_with_kwargs
795
802
  if self.is_class_unconstrained(self.schema.classes[slot.range]):
796
803
  return ""
797
804
 
805
+ if slot.range in self.schema.enums:
806
+ # Open enum
807
+ if not self.schema.enums[slot.range].permissible_values:
808
+ return ""
809
+
798
810
  aliased_slot_name = self.slot_name(slot.name) # Mangled name by which the slot is known in python
799
811
  _, _, base_type_name = self.class_reference_type(slot, cls)
800
812
 
@@ -845,7 +857,7 @@ dataclasses._init_fn = dataclasses_init_fn_with_kwargs
845
857
  elif slot.inlined:
846
858
  slot_range_cls = self.schema.classes[slot.range]
847
859
  identifier = self.class_identifier(slot_range_cls)
848
- # If we don't have an identifier and we are expecting to be inlined first class elements
860
+ # If we don't have an identifier, and we are expecting to be inlined first class elements
849
861
  # (inlined_as_list is not True), we will use the first required field as the key.
850
862
  # Note that this may not always work, but the workaround is straight forward -- set inlined_as_list to
851
863
  # True
@@ -0,0 +1,25 @@
1
+ import logging
2
+
3
+ import click
4
+
5
+ LOG_LEVEL_STRINGS = ["CRITICAL", "ERROR", "WARNING", "INFO", "DEBUG"]
6
+ DEFAULT_LOG_LEVEL: str = "WARNING"
7
+ DEFAULT_LOG_LEVEL_INT: int = logging.WARNING
8
+
9
+
10
+ def log_level_option(fn):
11
+ def callback(ctx, param, value):
12
+ log_level_string = value.upper()
13
+ log_level_int = getattr(logging, log_level_string, None)
14
+ if not isinstance(log_level_int, int):
15
+ raise ValueError(f"Invalid log level: {log_level_string}")
16
+ logging.basicConfig(level=log_level_int)
17
+
18
+ return click.option(
19
+ "--log_level",
20
+ type=click.Choice(LOG_LEVEL_STRINGS),
21
+ help="Logging level",
22
+ default=DEFAULT_LOG_LEVEL,
23
+ show_default=True,
24
+ callback=callback,
25
+ )(fn)
linkml/utils/generator.py CHANGED
@@ -51,13 +51,11 @@ from linkml_runtime.utils.formatutils import camelcase, underscore
51
51
  from linkml_runtime.utils.namespaces import Namespaces
52
52
 
53
53
  from linkml import LOCAL_METAMODEL_YAML_FILE
54
+ from linkml.utils.cli_utils import DEFAULT_LOG_LEVEL_INT, log_level_option
54
55
  from linkml.utils.mergeutils import alias_root
55
56
  from linkml.utils.schemaloader import SchemaLoader
56
57
  from linkml.utils.typereferences import References
57
58
 
58
- DEFAULT_LOG_LEVEL: str = "WARNING"
59
- DEFAULT_LOG_LEVEL_INT: int = logging.WARNING
60
-
61
59
 
62
60
  @lru_cache
63
61
  def _resolved_metamodel(mergeimports):
@@ -223,7 +221,7 @@ class Generator(metaclass=abc.ABCMeta):
223
221
  else:
224
222
  if isinstance(schema, SchemaDefinition):
225
223
  # schemaloader based methods require schemas to have been created via SchemaLoader,
226
- # which prepopulates some fields (e.g definition_url). If the schema has not been processed through the
224
+ # which prepopulates some fields (e.g. definition_url). If the schema has not been processed through the
227
225
  # loader, then roundtrip
228
226
  if any(c for c in schema.classes.values() if not c.definition_uri):
229
227
  schema = yaml_dumper.dumps(schema)
@@ -339,11 +337,11 @@ class Generator(metaclass=abc.ABCMeta):
339
337
 
340
338
  def visit_class_slot(self, cls: ClassDefinition, aliased_slot_name: str, slot: SlotDefinition) -> None:
341
339
  """Visited for each slot in a class. If class level visit_all_slots is true, this is visited once
342
- for any class that is inherited (class itself, is_a, mixin, apply_to). Otherwise just the own slots.
340
+ for any class that is inherited (class itself, is_a, mixin, apply_to). Otherwise, just the own slots.
343
341
 
344
342
  @param cls: containing class
345
343
  @param aliased_slot_name: Aliased slot name. May not be unique across all class slots
346
- @param slot: slot being visited
344
+ @param slot: being visited
347
345
  """
348
346
  ...
349
347
 
@@ -758,11 +756,28 @@ class Generator(metaclass=abc.ABCMeta):
758
756
  # TODO: add lru cache once we get identity into the classes
759
757
  def domain_slots(self, cls: ClassDefinition) -> List[SlotDefinition]:
760
758
  """Return all slots in the class definition that are owned by the class"""
761
- return [
762
- slot
763
- for slot in [self.schema.slots[sn] for sn in cls.slots]
764
- if cls.name in slot.domain_of or (set(cls.mixins).intersection(slot.domain_of))
765
- ]
759
+ domain_slots = []
760
+ for slot_name in cls.slots:
761
+ slot = self.schema.slots[slot_name]
762
+
763
+ # add any mixin ancestors here so that slots will be distributed to descendants correctly via mixin
764
+ # hierarchy.
765
+ mixin_ancestors = []
766
+ if cls.mixins:
767
+ for mixin in cls.mixins:
768
+ for ancestor in self.schemaview.class_ancestors(mixin, mixins=False):
769
+ if ancestor not in mixin_ancestors:
770
+ mixin_ancestors.append(ancestor)
771
+
772
+ for mixin_ancestor in mixin_ancestors:
773
+ if mixin_ancestor not in cls.mixins:
774
+ cls.mixins.append(mixin_ancestor)
775
+
776
+ # Check if the class is in the domain of the slot or if any of its mixins are in the domain
777
+ if cls.name in slot.domain_of or (set(cls.mixins).intersection(slot.domain_of)):
778
+ domain_slots.append(slot)
779
+
780
+ return domain_slots
766
781
 
767
782
  def add_mappings(self, defn: Definition) -> None:
768
783
  """
@@ -855,32 +870,16 @@ class Generator(metaclass=abc.ABCMeta):
855
870
 
856
871
 
857
872
  def shared_arguments(g: Type[Generator]) -> Callable[[Command], Command]:
858
- _LOG_LEVEL_STRINGS = ["CRITICAL", "ERROR", "WARNING", "INFO", "DEBUG"]
859
-
860
- def _log_level_string_to_int(log_level_string: str) -> int:
861
- log_level_string = log_level_string.upper()
862
- level = [e for e in log_level_string if e.startswith(log_level_string)]
863
- if not level:
864
- pass
865
- log_level_int = getattr(logging, log_level_string[0], logging.INFO)
866
- assert isinstance(log_level_int, int)
867
- return log_level_int
868
-
869
873
  def verbosity_callback(ctx, param, verbose):
870
874
  if verbose >= 2:
871
- logging.basicConfig(level=logging.DEBUG)
875
+ logging.basicConfig(level=logging.DEBUG, force=True)
872
876
  elif verbose == 1:
873
- logging.basicConfig(level=logging.INFO)
874
- else:
875
- logging.basicConfig(level=logging.WARNING)
877
+ logging.basicConfig(level=logging.INFO, force=True)
876
878
 
877
879
  def stacktrace_callback(ctx, param, stacktrace):
878
880
  if not stacktrace:
879
881
  sys.tracebacklimit = 0
880
882
 
881
- def log_level_callback(ctx, param, value):
882
- logging.basicConfig(level=_log_level_string_to_int(value))
883
-
884
883
  def decorator(f: Command) -> Command:
885
884
  f.params.append(Argument(("yamlfile",), type=click.Path(exists=True, dir_okay=False)))
886
885
  f.params.append(
@@ -909,21 +908,12 @@ def shared_arguments(g: Type[Generator]) -> Callable[[Command], Command]:
909
908
  )
910
909
  )
911
910
  f.params.append(Option(("--importmap", "-im"), type=click.File(), help="Import mapping file"))
912
- f.params.append(
913
- Option(
914
- ("--log_level",),
915
- type=click.Choice(_LOG_LEVEL_STRINGS),
916
- help="Logging level",
917
- default=DEFAULT_LOG_LEVEL,
918
- show_default=True,
919
- callback=log_level_callback,
920
- )
921
- )
911
+ log_level_option(f)
922
912
  f.params.append(
923
913
  Option(
924
914
  ("--verbose", "-v"),
925
915
  count=True,
926
- help="verbosity",
916
+ help="Verbosity. Takes precedence over --log_level.",
927
917
  callback=verbosity_callback,
928
918
  )
929
919
  )
@@ -3,7 +3,7 @@ import re
3
3
  from collections import defaultdict
4
4
  from copy import copy
5
5
  from dataclasses import dataclass
6
- from typing import Any, Callable, Dict, List, Union
6
+ from typing import Any, Callable, Dict, List, Optional, Union
7
7
 
8
8
  import click
9
9
  import yaml
@@ -319,15 +319,16 @@ class SchemaFixer:
319
319
  schema_dict: Dict[str, Any] = None,
320
320
  rules: Dict[str, Callable] = None,
321
321
  imports=False,
322
+ preserve_original_using: Optional[str] = None,
322
323
  ) -> Union[YAMLRoot, Dict]:
323
324
  """
324
325
  Changes element names to conform to naming conventions.
325
326
 
326
-
327
327
  :param schema: input schema
328
328
  :param schema_dict: if specified, the transformation will happen on this dictionary object
329
329
  :param rules: mappings between index slots and functions that normalize names
330
330
  :param imports: if True, all that imported modules are also fixed
331
+ :param preserve_original_using: if specified, the original name will be preserved in this slot
331
332
  :return:
332
333
  """
333
334
  if rules is None:
@@ -339,6 +340,7 @@ class SchemaFixer:
339
340
  }
340
341
  fixes = {}
341
342
  sv = SchemaView(schema)
343
+ preserved = []
342
344
  for n, e in sv.all_elements(imports=imports).items():
343
345
  if e.from_schema == "https://w3id.org/linkml/types":
344
346
  continue
@@ -348,9 +350,35 @@ class SchemaFixer:
348
350
  normalized = func(n)
349
351
  if normalized != n:
350
352
  fixes[n] = normalized
353
+ if preserve_original_using is not None:
354
+ preserved.append((typ, normalized, n))
355
+ # if preserve_original_using is not None:
356
+ # setattr(e, preserve_original_using, n)
357
+ # print(f"SETTING {typ} {e.name}.{preserve_original_using} = {n}")
351
358
  if schema_dict is not None:
352
359
  schema = schema_dict
353
- return yaml_rewrite(schema, fixes)
360
+ schema = yaml_rewrite(schema, fixes)
361
+ for typ, normalized, original in preserved:
362
+ pathmap = {
363
+ ClassDefinition.__name__: "classes",
364
+ TypeDefinition.__name__: "types",
365
+ SlotDefinition.__name__: "slots",
366
+ EnumDefinition.__name__: "enums",
367
+ }
368
+ if isinstance(schema, dict):
369
+ path = schema[pathmap[typ]]
370
+ if normalized not in path:
371
+ logger.warning(f"Cannot find {typ} {normalized} in {pathmap[typ]}")
372
+ continue
373
+ e = path[normalized]
374
+ if preserve_original_using not in e:
375
+ path[normalized][preserve_original_using] = original
376
+ else:
377
+ path = getattr(schema, pathmap[typ])
378
+ e = path[normalized]
379
+ if not getattr(e, preserve_original_using, None):
380
+ setattr(e, preserve_original_using, original)
381
+ return schema
354
382
 
355
383
 
356
384
  @click.group()
@@ -376,6 +404,13 @@ def main(verbose: int, quiet: bool):
376
404
  show_default=True,
377
405
  help="Apply fix to referenced elements from modules",
378
406
  )
407
+ @click.option(
408
+ "--preserve-original-using",
409
+ "-P",
410
+ default=None,
411
+ show_default=True,
412
+ help="If specified, original name will be preserved in this slot (e.g. title)",
413
+ )
379
414
  def fix_names(input_schema, **kwargs):
380
415
  """Fix element names to conform to naming conventions"""
381
416
  with open(input_schema) as f:
@@ -691,7 +691,7 @@ class SchemaLoader:
691
691
  cls.is_a,
692
692
  )
693
693
  for mixin in cls.mixins:
694
- # Note that apply_to has ben injected as a faux mixin so it gets covered here
694
+ # Note that apply_to has been injected as a faux mixin, so it gets covered here
695
695
  if mixin in self.schema.classes:
696
696
  self.merge_class(self.schema.classes[mixin], merged_classes)
697
697
  merge_classes(self.schema, cls, self.schema.classes[mixin], True)
@@ -1,3 +1,4 @@
1
+ from functools import lru_cache
1
2
  from pathlib import Path
2
3
  from typing import Any, Iterator, List, Optional, TextIO, Union
3
4
 
@@ -95,7 +96,7 @@ class Validator:
95
96
  if not self._validation_plugins:
96
97
  return []
97
98
 
98
- context = ValidationContext(self._schema, target_class)
99
+ context = self._context(target_class)
99
100
 
100
101
  for plugin in self._validation_plugins:
101
102
  plugin.pre_process(context)
@@ -117,3 +118,7 @@ class Validator:
117
118
 
118
119
  for plugin in self._validation_plugins:
119
120
  plugin.post_process(context)
121
+
122
+ @lru_cache
123
+ def _context(self, target_class: Optional[str] = None) -> ValidationContext:
124
+ return ValidationContext(self._schema, target_class)
@@ -20,8 +20,7 @@ from linkml_runtime.linkml_model import ElementName
20
20
  from linkml_runtime.utils.formatutils import camelcase
21
21
 
22
22
  from linkml.generators.pythongen import PythonGenerator
23
- from linkml.utils.datavalidator import DataValidator
24
- from linkml.validators import JsonSchemaDataValidator
23
+ from linkml.validator import Validator, _get_default_validator
25
24
 
26
25
 
27
26
  @dataclass
@@ -77,7 +76,7 @@ class ExampleRunner:
77
76
  prefix_map: Optional[Mapping[str, str]] = None
78
77
  """Custom prefix map, for emitting RDF/turtle."""
79
78
 
80
- _validator: Optional[DataValidator] = None
79
+ _validator: Optional[Validator] = None
81
80
 
82
81
  expand_dicts: bool = None
83
82
  """If true, then expand all dicts prior to validation."""
@@ -101,14 +100,14 @@ class ExampleRunner:
101
100
  return self._python_module
102
101
 
103
102
  @property
104
- def validator(self) -> DataValidator:
103
+ def validator(self) -> Validator:
105
104
  """
106
105
  Get the current validator
107
106
 
108
107
  :return:
109
108
  """
110
109
  if self._validator is None:
111
- self._validator = JsonSchemaDataValidator(self.schemaview.schema)
110
+ self._validator = _get_default_validator(self.schemaview.schema)
112
111
  return self._validator
113
112
 
114
113
  def process_examples(self):
@@ -179,7 +178,11 @@ class ExampleRunner:
179
178
  summary.add(f"## {stem}", "### Input", "```yaml", f"{yaml.dump(input_dict)}", "```")
180
179
  success = True
181
180
  try:
182
- validator.validate_dict(input_dict, tc, closed=True)
181
+ report = validator.validate(input_dict, tc)
182
+ if report.results:
183
+ raise Exception(
184
+ "\n".join(f"[{result.severity.value}] {result.message}" for result in report.results)
185
+ )
183
186
  # json validation is incomplete: also try object instantiation
184
187
  self._load_from_dict(input_dict, target_class=tc)
185
188
  except Exception as e:
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: linkml
3
- Version: 1.6.4
3
+ Version: 1.6.6
4
4
  Summary: Linked Open Data Modeling Language
5
5
  Home-page: https://linkml.io/linkml/
6
6
  Keywords: schema,linked data,data modeling,rdf,owl,biolink
@@ -19,7 +19,7 @@ linkml/generators/docgen/type.md.jinja2,sha256=QmCMJZrFwP33eHkggBVtypbyrxTb-XZn9
19
19
  linkml/generators/docgen.py,sha256=zsLzbXN2t9pafQubEnb7QshufMKBMqUWjdLE1OyFyq8,33296
20
20
  linkml/generators/dotgen.py,sha256=CnbVY6CO1OMuiYXYnvxgNN2IW1mtOQW-J-QnwZlXkUI,5012
21
21
  linkml/generators/erdiagramgen.py,sha256=Gu-_nhLuEPTsYYaoV6tNS1V6cZ2dNJdm6YwxC0VGl7g,10315
22
- linkml/generators/excelgen.py,sha256=uFc8xOCeBhWhWdHPTepotBBaIQPtBA6MkFfVtiKWGhw,6699
22
+ linkml/generators/excelgen.py,sha256=ClzWs3wO-QjH2XzzQVUsGF2n6fnoozmXopxPYDEJyPs,7232
23
23
  linkml/generators/golanggen.py,sha256=Dnl7dhmb1AIK3Is7KRaUbxPd3kBTjWuspFqardiBTJ8,5751
24
24
  linkml/generators/golrgen.py,sha256=tIsbsr4SM9HxeK7TCUwnq-GdSKZ_qW5f7fybg_aqCZE,3436
25
25
  linkml/generators/graphqlgen.py,sha256=6qZpI0rwg3ypsv_KrLVzXgdsJfR8LNPqgMwaRwzwnDs,2151
@@ -34,13 +34,13 @@ linkml/generators/linkmlgen.py,sha256=QhIPA1v2g_g5fien3ZKN-L6TkDk3t7puVFrcoEnwkw
34
34
  linkml/generators/markdowngen.py,sha256=ZPLahEPjWsrAsKq4CHbVDXeVd0n1NO-2STs068-g0Ac,32948
35
35
  linkml/generators/namespacegen.py,sha256=vVcIyM0zlKd7XRvtdzwTwHjG4Pg49801gy4FUmjJlqQ,6450
36
36
  linkml/generators/oocodegen.py,sha256=l-5zck_NNAUxJiApJrnDPkqxCv3b6v5s4hC_smelszU,7620
37
- linkml/generators/owlgen.py,sha256=6_OJufisaEetp9_bIrnKrQYvhxU8TiJhHZnSIacbEXw,51189
37
+ linkml/generators/owlgen.py,sha256=omOfL5oKHeQs-JARbGbTvydZ6DX4Bzyv5pukgW02kds,52419
38
38
  linkml/generators/plantumlgen.py,sha256=Vs__5x9ioiT4IBTbvZUpgT8MsYJ0amfBL64MB_nmQPc,14870
39
39
  linkml/generators/prefixmapgen.py,sha256=JJ7hgzuqKVfFZrbDV76Dk8dR2NHsmpp-eNUAspXkfwA,4626
40
- linkml/generators/projectgen.py,sha256=EVgS5bDzFTm3WAuMg3lC3rzdcaW-hgpq99qZA4nksSY,9544
40
+ linkml/generators/projectgen.py,sha256=g3JR2oXPM_QXhWUGukP9ts1P7tqxIeABaRdv130gbo4,9578
41
41
  linkml/generators/protogen.py,sha256=9YfxBZkQdBWwsUbstxEUR4xRWNuAKSfz9zXPhgIYePU,2328
42
- linkml/generators/pydanticgen.py,sha256=CiUHkThpYcVzSkWb4IODhteKau3z-ZQ7yQpFA9dSRGw,22298
43
- linkml/generators/pythongen.py,sha256=DBsZNa9T3fGW4Qw0_l8N72f_DeUEugvcbRN9FJLi1QM,52166
42
+ linkml/generators/pydanticgen.py,sha256=RoFgX3-_8cx70Q3sUmSrZc0zTHbS0d55HgQp45U7fb4,24872
43
+ linkml/generators/pythongen.py,sha256=yGYlRJ4rNm2QQLFDjyuUnqCyKlzz-b3eSOhkSu8aCwI,52491
44
44
  linkml/generators/rdfgen.py,sha256=LxzYBaFEkV7rlf54nWv_6H6AGcWMRXwkaeVXq9VYEc8,2693
45
45
  linkml/generators/shaclgen.py,sha256=KxNmDZW2ciCuSqUhJ65TxLTjF8jME1FmN5SaWJCuW9k,8662
46
46
  linkml/generators/shexgen.py,sha256=Awtn5SyjS-TUcVCwMdT0F7hNO4K8VcSCYBaFru45Mwg,8994
@@ -82,19 +82,20 @@ linkml/transformers/model_transformer.py,sha256=tK_MpRDI-q2qfe8KHT6qJHP8ZruKjYx1
82
82
  linkml/transformers/relmodel_transformer.py,sha256=hRUVtH4gylDssOXoWvVxTetF9ESbITrAZOFu53b_Eg0,17836
83
83
  linkml/transformers/schema_renamer.py,sha256=Cr18TyktX64b5iFh5V6R_ILPVzXjbDYVDDZQyqFiAv8,5271
84
84
  linkml/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
85
+ linkml/utils/cli_utils.py,sha256=MdVbox1qr6kDUHUCWAmFhV-D6ebs_nn8vVwB8KDQfew,742
85
86
  linkml/utils/converter.py,sha256=rdhCI7Tsjddr3o1rVBfMq5gQubk_GE6fqlBBmyxI5_M,6270
86
87
  linkml/utils/datautils.py,sha256=2XWM9LBSVp8v3SwIZECrX3SjDUYzdnP-syjp6YdL89E,3734
87
88
  linkml/utils/datavalidator.py,sha256=kBdWaVi8IZT1bOwEJgJYx-wZAb_PTBObB9nHpYORfKA,472
88
89
  linkml/utils/execute_tutorial.py,sha256=T4kHTSyz3ItJGEUZxVjR-3yLVKnOr5Ix4NMGE47-IuE,6912
89
- linkml/utils/generator.py,sha256=74g3s5FVpOqfOtkMTrpk0k95ny2-kSFc0AvcSlKgka0,38320
90
+ linkml/utils/generator.py,sha256=WAlP_gfZfAZYNklsh8l4GtiWZ338kjLg7xpQAANgUNg,38217
90
91
  linkml/utils/helpers.py,sha256=yR8n4zFA5wPcYC7xzRuNF3wO16vG80v6j7DM3qTNmIc,447
91
92
  linkml/utils/ifabsent_functions.py,sha256=FZwceqwlq81lMPDsdNfSHhtzDXSf8F4cbbhRdnDzjss,5765
92
93
  linkml/utils/logictools.py,sha256=GSmBiobC49TcQjE08RtXEE3JwJEOV7eEREio25uJiFs,21184
93
94
  linkml/utils/mergeutils.py,sha256=QVm2iQB4v_L2rSvPBsPe9C865R03BgV3TzlPoTTTwWQ,9044
94
95
  linkml/utils/rawloader.py,sha256=QB7Rdvy4o4ZJEWBWa2_2xzz2TOh_6Oe4slvUn8IBVIc,4329
95
96
  linkml/utils/schema_builder.py,sha256=WLSit3J4lTifaFLLWTwjqIRiTru1pqvTIUuC1TrxS6Y,9902
96
- linkml/utils/schema_fixer.py,sha256=rjwJB5ukfrgc0Z-j3mKSNzRMkHPp_k_zFKaFNIPeIv8,15086
97
- linkml/utils/schemaloader.py,sha256=Ju1QCJPdNbLkXzTyvXmv7Bpk6EQ07Hzh7qgeYdiyTU8,46419
97
+ linkml/utils/schema_fixer.py,sha256=ajsxpwD4yMjDk1iDtoKJTsa34SIqGshWxlnSNXVZ52w,16745
98
+ linkml/utils/schemaloader.py,sha256=bBSTqimMDTFH2FcKtRz99dKNJzV_myPsZSkIFp_6-A0,46421
98
99
  linkml/utils/schemasynopsis.py,sha256=6NKa89bkZfZQE9QM0St-6xQcrsHPkYmBgnnWnlgAcQ4,18455
99
100
  linkml/utils/sqlutils.py,sha256=86XeEbfY0Dk-EObw4q5-dxyzSeBtmIhjqqyDcR8ALS0,16591
100
101
  linkml/utils/typereferences.py,sha256=8Yfuz9-HAwOPoJLbIcO_sY9zf32hvPRzGeSOzECfMWA,2232
@@ -114,7 +115,7 @@ linkml/validator/plugins/recommended_slots_plugin.py,sha256=kOdoYQyye47nLA7BjorV
114
115
  linkml/validator/plugins/validation_plugin.py,sha256=9SMHF8b2bgG9-8351e8bY676e0A4aEBJSXvMjMF5kXg,1548
115
116
  linkml/validator/report.py,sha256=kkkuh-IZF9--cO-2wGjwP3PDLvOcjjvC8AOlxXUIOAM,870
116
117
  linkml/validator/validation_context.py,sha256=MmOwLk4cF_Cy7fPdFK61Eti3c3dgzKSIu6r_PmkkoZs,2388
117
- linkml/validator/validator.py,sha256=jOSdYyC8QIm1GWmllM7Z1_GV-2VO3hwEwdF2AHE-DNY,5476
118
+ linkml/validator/validator.py,sha256=sdWbAOlnNYQWnZSEIuLpGQqH3W4cNq_8M_CdLtsNMH0,5648
118
119
  linkml/validators/__init__.py,sha256=43W3J5NPKhwa3ZFHLRYsJMocwQKWGYCF9Ki9r0ccGbc,202
119
120
  linkml/validators/jsonschemavalidator.py,sha256=_v0finzU2RGPC5xo0CylYge9XkY7oAigcly2SKLwFuI,7865
120
121
  linkml/validators/sparqlvalidator.py,sha256=JowuZ5KxmWkldgWIXAb8DJi7YCPm8x3it0QkgM4lSi0,4612
@@ -122,9 +123,9 @@ linkml/workspaces/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU
122
123
  linkml/workspaces/datamodel/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
123
124
  linkml/workspaces/datamodel/workspaces.py,sha256=4HdkqweGNfMPqnB1_Onc9DcTfkhoagTRcqruh08nRoI,14905
124
125
  linkml/workspaces/datamodel/workspaces.yaml,sha256=EjVrwPpeRZqJRjuGyyDRxxFzuv55SiLIXPBRUG6HStU,4233
125
- linkml/workspaces/example_runner.py,sha256=hblnsZVntuwFO4vqcwl_K5XH6jxb52xCtvdc7Sfq_Yc,11452
126
- linkml-1.6.4.dist-info/LICENSE,sha256=kORMoywK6j9_iy0UvLR-a80P1Rvc9AOM4gsKlUNZABg,535
127
- linkml-1.6.4.dist-info/METADATA,sha256=rXsZaS97nlj9NvQH1poCuvB_xzVjmzg6UGzydhsrwSA,3496
128
- linkml-1.6.4.dist-info/WHEEL,sha256=vVCvjcmxuUltf8cYhJ0sJMRDLr1XsPuxEId8YDzbyCY,88
129
- linkml-1.6.4.dist-info/entry_points.txt,sha256=za8r49Z5gcz3rAYTZLbxw5EPZr1rGuxSe1uiRUpf8R0,2143
130
- linkml-1.6.4.dist-info/RECORD,,
126
+ linkml/workspaces/example_runner.py,sha256=uumXyPZ7xUJSZyRtjDP4TCCxgKSSOfebpufXc0_l0jY,11610
127
+ linkml-1.6.6.dist-info/WHEEL,sha256=vVCvjcmxuUltf8cYhJ0sJMRDLr1XsPuxEId8YDzbyCY,88
128
+ linkml-1.6.6.dist-info/LICENSE,sha256=kORMoywK6j9_iy0UvLR-a80P1Rvc9AOM4gsKlUNZABg,535
129
+ linkml-1.6.6.dist-info/entry_points.txt,sha256=za8r49Z5gcz3rAYTZLbxw5EPZr1rGuxSe1uiRUpf8R0,2143
130
+ linkml-1.6.6.dist-info/METADATA,sha256=TztvtOceZ4FBx8MPbnRdKJT2xLuWbBb_GOoA7hXLJsY,3496
131
+ linkml-1.6.6.dist-info/RECORD,,
File without changes