linkml 1.8.7__py3-none-any.whl → 1.9.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (90)
  1. linkml/generators/common/build.py +1 -7
  2. linkml/generators/common/ifabsent_processor.py +20 -20
  3. linkml/generators/common/lifecycle.py +2 -1
  4. linkml/generators/common/naming.py +1 -1
  5. linkml/generators/common/template.py +5 -5
  6. linkml/generators/common/type_designators.py +1 -3
  7. linkml/generators/csvgen.py +3 -3
  8. linkml/generators/docgen/class.md.jinja2 +1 -1
  9. linkml/generators/docgen/enum.md.jinja2 +1 -1
  10. linkml/generators/docgen/schema.md.jinja2 +1 -1
  11. linkml/generators/docgen/slot.md.jinja2 +4 -1
  12. linkml/generators/docgen/subset.md.jinja2 +1 -1
  13. linkml/generators/docgen/type.md.jinja2 +1 -1
  14. linkml/generators/docgen.py +20 -25
  15. linkml/generators/dotgen.py +4 -4
  16. linkml/generators/erdiagramgen.py +7 -7
  17. linkml/generators/excelgen.py +2 -3
  18. linkml/generators/golanggen.py +2 -2
  19. linkml/generators/golrgen.py +3 -3
  20. linkml/generators/jsonldcontextgen.py +4 -4
  21. linkml/generators/jsonschemagen.py +5 -5
  22. linkml/generators/linkmlgen.py +10 -2
  23. linkml/generators/markdowngen.py +8 -10
  24. linkml/generators/mermaidclassdiagramgen.py +2 -2
  25. linkml/generators/oocodegen.py +10 -10
  26. linkml/generators/owlgen.py +19 -18
  27. linkml/generators/plantumlgen.py +15 -15
  28. linkml/generators/prefixmapgen.py +5 -5
  29. linkml/generators/projectgen.py +10 -10
  30. linkml/generators/pydanticgen/array.py +15 -21
  31. linkml/generators/pydanticgen/build.py +4 -4
  32. linkml/generators/pydanticgen/includes.py +1 -1
  33. linkml/generators/pydanticgen/pydanticgen.py +24 -28
  34. linkml/generators/pydanticgen/template.py +36 -36
  35. linkml/generators/pythongen.py +21 -29
  36. linkml/generators/rdfgen.py +2 -2
  37. linkml/generators/shaclgen.py +19 -10
  38. linkml/generators/shexgen.py +3 -3
  39. linkml/generators/sparqlgen.py +3 -3
  40. linkml/generators/sqlalchemygen.py +2 -2
  41. linkml/generators/terminusdbgen.py +2 -3
  42. linkml/generators/typescriptgen.py +3 -3
  43. linkml/generators/yumlgen.py +13 -13
  44. linkml/linter/cli.py +1 -1
  45. linkml/linter/config/datamodel/config.py +207 -213
  46. linkml/linter/config/datamodel/config.yaml +51 -3
  47. linkml/linter/config/default.yaml +3 -0
  48. linkml/linter/formatters/markdown_formatter.py +2 -2
  49. linkml/linter/linter.py +4 -3
  50. linkml/linter/rules.py +38 -19
  51. linkml/reporting/model.py +11 -15
  52. linkml/transformers/logical_model_transformer.py +9 -8
  53. linkml/transformers/relmodel_transformer.py +6 -6
  54. linkml/transformers/schema_renamer.py +2 -2
  55. linkml/utils/converter.py +1 -1
  56. linkml/utils/deprecation.py +3 -3
  57. linkml/utils/execute_tutorial.py +5 -6
  58. linkml/utils/generator.py +17 -16
  59. linkml/utils/helpers.py +2 -2
  60. linkml/utils/logictools.py +5 -4
  61. linkml/utils/mergeutils.py +51 -5
  62. linkml/utils/schema_builder.py +8 -8
  63. linkml/utils/schema_fixer.py +8 -8
  64. linkml/utils/schemaloader.py +16 -15
  65. linkml/utils/schemasynopsis.py +29 -29
  66. linkml/utils/sqlutils.py +5 -5
  67. linkml/utils/typereferences.py +5 -6
  68. linkml/utils/validation.py +2 -2
  69. linkml/validator/cli.py +7 -6
  70. linkml/validator/loaders/delimited_file_loader.py +2 -1
  71. linkml/validator/loaders/json_loader.py +2 -1
  72. linkml/validator/loaders/loader.py +2 -1
  73. linkml/validator/loaders/passthrough_loader.py +2 -1
  74. linkml/validator/loaders/yaml_loader.py +2 -1
  75. linkml/validator/plugins/jsonschema_validation_plugin.py +2 -1
  76. linkml/validator/plugins/pydantic_validation_plugin.py +2 -1
  77. linkml/validator/plugins/recommended_slots_plugin.py +3 -2
  78. linkml/validator/plugins/shacl_validation_plugin.py +2 -1
  79. linkml/validator/plugins/validation_plugin.py +1 -1
  80. linkml/validator/report.py +3 -3
  81. linkml/validator/validator.py +3 -2
  82. linkml/validators/jsonschemavalidator.py +6 -5
  83. linkml/workspaces/datamodel/workspaces.py +21 -26
  84. linkml/workspaces/example_runner.py +7 -6
  85. {linkml-1.8.7.dist-info → linkml-1.9.1.dist-info}/METADATA +6 -9
  86. linkml-1.9.1.dist-info/RECORD +162 -0
  87. {linkml-1.8.7.dist-info → linkml-1.9.1.dist-info}/WHEEL +1 -1
  88. linkml-1.8.7.dist-info/RECORD +0 -162
  89. {linkml-1.8.7.dist-info → linkml-1.9.1.dist-info}/LICENSE +0 -0
  90. {linkml-1.8.7.dist-info → linkml-1.9.1.dist-info}/entry_points.txt +0 -0
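
Most of the per-file churn below is one mechanical pattern: with the minimum supported Python raised from 3.8 to 3.9 (see the METADATA changes at the end of this diff), deprecated typing aliases such as List, Dict, Set and Type are replaced by the PEP 585 built-in generics (list, dict, set, type), and abstract container types such as Iterator, Iterable and Mapping are now imported from collections.abc. A minimal before/after sketch of the pattern, illustrative only and not code taken from the package:

# 1.8.7 style -- typing aliases, needed for Python 3.8 compatibility:
#   from typing import Dict, Iterator, List, Optional, Type
#   def load_all(target_class: Optional[Type[object]] = None) -> List[object]: ...

# 1.9.1 style -- built-in generics plus collections.abc (Python 3.9+):
from collections.abc import Iterator
from typing import Optional

def load_all(target_class: Optional[type[object]] = None) -> list[object]:
    # Hypothetical signature mirroring the sqlutils.py change below.
    return []

def iter_keys(names: dict[str, list[str]]) -> Iterator[str]:
    # dict/list replace typing.Dict/List; Iterator comes from collections.abc.
    yield from names
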
linkml/utils/sqlutils.py CHANGED
@@ -5,7 +5,7 @@ import os
  from dataclasses import dataclass
  from pathlib import Path
  from types import ModuleType
- from typing import Any, List, Optional, Type, Union
+ from typing import Any, Optional, Union

  import click
  import linkml_runtime.linkml_model.meta as metamodel
@@ -137,7 +137,7 @@ class SQLStore:
  self.native_module = gen.compile_module()
  return self.native_module

- def load(self, target_class: Union[str, Type[YAMLRoot]] = None) -> YAMLRoot:
+ def load(self, target_class: Union[str, type[YAMLRoot]] = None) -> YAMLRoot:
  """
  Loads a LinkML object from the wrapped SQLite database

@@ -146,7 +146,7 @@ class SQLStore:
  """
  return self.load_all(target_class=target_class)[0]

- def load_all(self, target_class: Union[str, Type[YAMLRoot]] = None) -> List[YAMLRoot]:
+ def load_all(self, target_class: Union[str, type[YAMLRoot]] = None) -> list[YAMLRoot]:
  if target_class is None:
  target_class_name = infer_root_class(self.schemaview)
  target_class = self.native_module.__dict__[target_class_name]
@@ -178,7 +178,7 @@ class SQLStore:
  session.add(nu_obj)
  session.commit()

- def to_sqla_type(self, target_class: Type[YAMLRoot]) -> Any:
+ def to_sqla_type(self, target_class: type[YAMLRoot]) -> Any:
  for n, nu_typ in inspect.getmembers(self.module):
  if n == target_class.__name__:
  return nu_typ
@@ -235,7 +235,7 @@ class SQLStore:
  else:
  return obj

- def from_sqla(self, obj: Any) -> Optional[Union[YAMLRoot, List[YAMLRoot]]]:
+ def from_sqla(self, obj: Any) -> Optional[Union[YAMLRoot, list[YAMLRoot]]]:
  """
  Translate from SQLAlchemy declarative module to native LinkML

linkml/utils/typereferences.py CHANGED
@@ -1,5 +1,4 @@
  from dataclasses import dataclass
- from typing import Set

  from linkml_runtime.linkml_model.meta import (
  ClassDefinitionName,
@@ -33,11 +32,11 @@ class References:
  Summary of references to a given class. The reference class is the key to the dictionary carrying classrefs
  """

- classrefs: Set[ClassDefinitionName] = empty_set() # Refs of type class
- slotrefs: Set[SlotDefinitionName] = empty_set() # Refs of type slot
- typerefs: Set[TypeDefinitionName] = empty_set() # Refs of type type
- subsetrefs: Set[SubsetDefinitionName] = empty_set() # Refs of type subset
- enumrefs: Set[EnumDefinitionName] = empty_set() # Refs of type enum
+ classrefs: set[ClassDefinitionName] = empty_set() # Refs of type class
+ slotrefs: set[SlotDefinitionName] = empty_set() # Refs of type slot
+ typerefs: set[TypeDefinitionName] = empty_set() # Refs of type type
+ subsetrefs: set[SubsetDefinitionName] = empty_set() # Refs of type subset
+ enumrefs: set[EnumDefinitionName] = empty_set() # Refs of type enum

  def addref(self, fromtype: RefType, fromname: ElementName) -> None:
  if fromtype is ClassType:
linkml/utils/validation.py CHANGED
@@ -1,5 +1,5 @@
  import json
- from typing import TextIO, Type, Union
+ from typing import TextIO, Union

  import jsonschema
  from linkml_runtime.dumpers import json_dumper
@@ -20,7 +20,7 @@ def _as_dict(inst):
  def validate_object(
  data: YAMLRoot,
  schema: Union[str, TextIO, SchemaDefinition],
- target_class: Type[YAMLRoot] = None,
+ target_class: type[YAMLRoot] = None,
  closed: bool = True,
  ):
  """
linkml/validator/cli.py CHANGED
@@ -1,8 +1,9 @@
  import importlib
  import sys
  from collections import Counter
+ from collections.abc import Iterable
  from pathlib import Path
- from typing import Any, Dict, Iterable, List, Optional, Tuple, Union
+ from typing import Any, Optional, Union

  import click
  import yaml
@@ -19,8 +20,8 @@ from linkml.validator.report import Severity
  class Config(BaseModel):
  schema_path: Union[str, Path] = Field(alias="schema")
  target_class: Optional[str] = None
- data_sources: Iterable[Union[str, Dict[str, Dict[str, str]]]] = []
- plugins: Optional[Dict[str, Optional[Dict[str, Any]]]] = {"JsonschemaValidationPlugin": {"closed": True}}
+ data_sources: Iterable[Union[str, dict[str, dict[str, str]]]] = []
+ plugins: Optional[dict[str, Optional[dict[str, Any]]]] = {"JsonschemaValidationPlugin": {"closed": True}}


  def _resolve_class(full_class_name: str, default_package: str, **kwargs):
@@ -37,7 +38,7 @@ def _resolve_class(full_class_name: str, default_package: str, **kwargs):
  return class_inst(**kwargs)


- def _resolve_plugins(plugin_config: Dict[str, Dict[str, Any]]) -> List[ValidationPlugin]:
+ def _resolve_plugins(plugin_config: dict[str, dict[str, Any]]) -> list[ValidationPlugin]:
  plugins = []
  for key, value in plugin_config.items():
  plugin = _resolve_class(key, "linkml.validator.plugins", **value if value else {})
@@ -45,7 +46,7 @@ def _resolve_plugins(plugin_config: Dict[str, Dict[str, Any]]) -> List[Validatio
  return plugins


- def _resolve_loaders(loader_config: Iterable[Union[str, Dict[str, Dict[str, str]]]]) -> List[Loader]:
+ def _resolve_loaders(loader_config: Iterable[Union[str, dict[str, dict[str, str]]]]) -> list[Loader]:
  loaders = []
  for entry in loader_config:
  if isinstance(entry, str):
@@ -123,7 +124,7 @@ def cli(
  schema: Optional[Path],
  target_class: Optional[str],
  config: Optional[str],
- data_sources: Tuple[str],
+ data_sources: tuple[str],
  exit_on_first_failure: bool,
  legacy_mode: bool,
  module: Optional[str],
linkml/validator/loaders/delimited_file_loader.py CHANGED
@@ -1,7 +1,8 @@
  import csv
  import re
  from abc import ABC, abstractmethod
- from typing import Iterator, Optional
+ from collections.abc import Iterator
+ from typing import Optional

  from linkml.validator.loaders.loader import Loader

linkml/validator/loaders/json_loader.py CHANGED
@@ -1,4 +1,5 @@
- from typing import Any, Iterator
+ from collections.abc import Iterator
+ from typing import Any

  from linkml_runtime.loaders import json_loader

linkml/validator/loaders/loader.py CHANGED
@@ -1,5 +1,6 @@
  from abc import ABC, abstractmethod
- from typing import Any, Iterator
+ from collections.abc import Iterator
+ from typing import Any


  class Loader(ABC):
linkml/validator/loaders/passthrough_loader.py CHANGED
@@ -1,4 +1,5 @@
- from typing import Any, Iterator
+ from collections.abc import Iterator
+ from typing import Any

  from linkml.validator.loaders.loader import Loader

linkml/validator/loaders/yaml_loader.py CHANGED
@@ -1,4 +1,5 @@
- from typing import Any, Iterator
+ from collections.abc import Iterator
+ from typing import Any

  import yaml

linkml/validator/plugins/jsonschema_validation_plugin.py CHANGED
@@ -1,5 +1,6 @@
  import os
- from typing import Any, Iterator, Optional
+ from collections.abc import Iterator
+ from typing import Any, Optional

  from jsonschema.exceptions import best_match

linkml/validator/plugins/pydantic_validation_plugin.py CHANGED
@@ -1,4 +1,5 @@
- from typing import Any, Iterator
+ from collections.abc import Iterator
+ from typing import Any

  from linkml.validator.plugins.validation_plugin import ValidationPlugin
  from linkml.validator.report import Severity, ValidationResult
linkml/validator/plugins/recommended_slots_plugin.py CHANGED
@@ -1,4 +1,5 @@
- from typing import Iterator, List, Optional
+ from collections.abc import Iterator
+ from typing import Optional

  from linkml.validator.plugins.validation_plugin import ValidationPlugin
  from linkml.validator.report import Severity, ValidationResult
@@ -10,7 +11,7 @@ class RecommendedSlotsPlugin(ValidationPlugin):

  def process(self, instance: dict, context: ValidationContext) -> Iterator[ValidationResult]:
  def _do_process(
- instance: dict, class_name: str, location: Optional[List[str]] = None
+ instance: dict, class_name: str, location: Optional[list[str]] = None
  ) -> Iterator[ValidationResult]:
  if not isinstance(instance, dict):
  return
linkml/validator/plugins/shacl_validation_plugin.py CHANGED
@@ -1,5 +1,6 @@
  import os
- from typing import Any, Iterator, Optional
+ from collections.abc import Iterator
+ from typing import Any, Optional

  import rdflib
  from linkml_runtime.dumpers import rdflib_dumper
linkml/validator/plugins/validation_plugin.py CHANGED
@@ -1,5 +1,5 @@
  from abc import ABC, abstractmethod
- from typing import Iterator
+ from collections.abc import Iterator

  from linkml.validator.report import ValidationResult
  from linkml.validator.validation_context import ValidationContext
linkml/validator/report.py CHANGED
@@ -1,5 +1,5 @@
  from enum import Enum
- from typing import Any, List, Optional
+ from typing import Any, Optional

  from pydantic import BaseModel, Field

@@ -28,7 +28,7 @@ class ValidationResult(BaseModel):
  instance: Optional[Any] = None
  instance_index: Optional[int] = None
  instantiates: Optional[str] = None
- context: List[str] = []
+ context: list[str] = []

  # The source object that caused this validation result
  source: Any = Field(None, description="The source of this validation result", exclude=True)
@@ -41,4 +41,4 @@ class ValidationReport(BaseModel):
  A report object.
  """

- results: List[ValidationResult]
+ results: list[ValidationResult]
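
A note on the context field in report.py above: because ValidationResult is a pydantic BaseModel, the mutable default [] is copied for each new instance rather than shared, so rewriting List[str] as list[str] changes only the annotation, not the behaviour. A short sketch of that property, assuming pydantic is installed (illustrative, not package code):

from pydantic import BaseModel

class ResultSketch(BaseModel):
    # Hypothetical stand-in for ValidationResult; pydantic copies the
    # mutable default for every instance, unlike a plain class attribute.
    context: list[str] = []

a = ResultSketch()
b = ResultSketch()
a.context.append("slot: name")
assert b.context == []  # each instance gets its own list
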
linkml/validator/validator.py CHANGED
@@ -1,6 +1,7 @@
+ from collections.abc import Iterator
  from functools import lru_cache
  from pathlib import Path
- from typing import Any, Iterator, List, Optional, TextIO, Union
+ from typing import Any, Optional, TextIO, Union

  from linkml_runtime.linkml_model import SchemaDefinition
  from linkml_runtime.loaders import yaml_loader
@@ -27,7 +28,7 @@ class Validator:
  def __init__(
  self,
  schema: Union[str, dict, TextIO, Path, SchemaDefinition],
- validation_plugins: Optional[List[ValidationPlugin]] = None,
+ validation_plugins: Optional[list[ValidationPlugin]] = None,
  *,
  strict: bool = False,
  ) -> None:
linkml/validators/jsonschemavalidator.py CHANGED
@@ -1,8 +1,9 @@
  import logging
  import sys
+ from collections.abc import Iterable
  from dataclasses import asdict, dataclass, field
- from functools import lru_cache
- from typing import Any, Iterable, List, Type, Union
+ from functools import cache
+ from typing import Any, Union

  import click
  import jsonschema
@@ -28,7 +29,7 @@ class HashableSchemaDefinition(SchemaDefinition):
  return hash(self.id)


- @lru_cache(maxsize=None)
+ @cache
  def _generate_jsonschema(schema, top_class, closed, include_range_class_descendants):
  deprecation_warning("validators")
  logger.debug("Generating JSON Schema")
@@ -43,7 +44,7 @@ def _generate_jsonschema(schema, top_class, closed, include_range_class_descenda


  class JsonSchemaDataValidatorError(Exception):
- def __init__(self, validation_messages: List[str]) -> None:
+ def __init__(self, validation_messages: list[str]) -> None:
  deprecation_warning("validators")
  super().__init__("\n".join(validation_messages))
  self.validation_messages = validation_messages
@@ -72,7 +73,7 @@ class JsonSchemaDataValidator(DataValidator):
  deprecation_warning("validators")
  pass

- def validate_object(self, data: YAMLRoot, target_class: Type[YAMLRoot] = None, closed: bool = True) -> None:
+ def validate_object(self, data: YAMLRoot, target_class: type[YAMLRoot] = None, closed: bool = True) -> None:
  """
  validates instance data against a schema

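The jsonschemavalidator.py hunks above also swap @lru_cache(maxsize=None) for @cache. functools.cache, added in Python 3.9, is simply an unbounded lru_cache, so the caching behaviour is unchanged; only the spelling is newer. A small illustrative sketch (not package code):

from functools import cache, lru_cache

@cache  # Python 3.9+: equivalent to lru_cache(maxsize=None)
def fib(n: int) -> int:
    return n if n < 2 else fib(n - 1) + fib(n - 2)

@lru_cache(maxsize=None)  # the older spelling this diff removes
def fib_old(n: int) -> int:
    return n if n < 2 else fib_old(n - 1) + fib_old(n - 2)

assert fib(30) == fib_old(30) == 832040
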
linkml/workspaces/datamodel/workspaces.py CHANGED
@@ -7,14 +7,12 @@
  # single user or agent
  # license: https://creativecommons.org/publicdomain/zero/1.0/

- import dataclasses
  from dataclasses import dataclass
- from typing import Any, ClassVar, Dict, List, Optional, Union
+ from typing import Any, ClassVar, Optional, Union
  from jsonasobj2 import as_dict
  from linkml_runtime.linkml_model.types import String
  from linkml_runtime.utils.curienamespace import CurieNamespace
- from linkml_runtime.utils.dataclass_extensions_376 import dataclasses_init_fn_with_kwargs
  from linkml_runtime.utils.metamodelcore import Bool, XSDDateTime, empty_dict, empty_list
  from linkml_runtime.utils.slot import Slot
  from linkml_runtime.utils.yamlutils import YAMLRoot, extended_str
@@ -23,9 +21,6 @@ from rdflib import URIRef
  metamodel_version = "1.7.0"
  version = None

- # Overwrite dataclasses _init_fn to add **kwargs in __init__
- dataclasses._init_fn = dataclasses_init_fn_with_kwargs
-
  # Namespaces
  CSVW = CurieNamespace("csvw", "http://www.w3.org/ns/csvw#")
  DCAT = CurieNamespace("dcat", "http://www.w3.org/ns/dcat#")
@@ -84,7 +79,7 @@ class Project(YAMLRoot):
  A project consists of a single root schema
  """

- _inherited_slots: ClassVar[List[str]] = []
+ _inherited_slots: ClassVar[list[str]] = []

  class_class_uri: ClassVar[URIRef] = WORKSPACE.Project
  class_class_curie: ClassVar[str] = "workspace:Project"
@@ -98,18 +93,18 @@ class Project(YAMLRoot):
  schema: Optional[Union[dict, Any]] = None
  description: Optional[str] = None
  source_schema_path: Optional[Union[str, FileSystemPath]] = None
- data_files: Optional[Union[Union[str, FileSystemPath], List[Union[str, FileSystemPath]]]] = empty_list()
+ data_files: Optional[Union[Union[str, FileSystemPath], list[Union[str, FileSystemPath]]]] = empty_list()
  source_google_sheet_docs: Optional[
  Union[
- Dict[Union[str, GoogleSheetsDocId], Union[dict, "GoogleSheetsDoc"]],
- List[Union[dict, "GoogleSheetsDoc"]],
+ dict[Union[str, GoogleSheetsDocId], Union[dict, "GoogleSheetsDoc"]],
+ list[Union[dict, "GoogleSheetsDoc"]],
  ]
  ] = empty_dict()
  project_directory: Optional[Union[str, FileSystemPath]] = None
  external_project_path: Optional[Union[str, FileSystemPath]] = None
  last_saved: Optional[Union[str, XSDDateTime]] = None

- def __post_init__(self, *_: List[str], **kwargs: Dict[str, Any]):
+ def __post_init__(self, *_: list[str], **kwargs: dict[str, Any]):
  if self._is_empty(self.name):
  self.MissingRequiredField("name")
  if not isinstance(self.name, ProjectName):
@@ -159,7 +154,7 @@ class GoogleSheetsDoc(YAMLRoot):
  A google sheets document can contain multiple individual sheets
  """

- _inherited_slots: ClassVar[List[str]] = []
+ _inherited_slots: ClassVar[list[str]] = []

  class_class_uri: ClassVar[URIRef] = WORKSPACE.GoogleSheetsDoc
  class_class_curie: ClassVar[str] = "workspace:GoogleSheetsDoc"
@@ -167,9 +162,9 @@ class GoogleSheetsDoc(YAMLRoot):
  class_model_uri: ClassVar[URIRef] = WORKSPACE.GoogleSheetsDoc

  id: Union[str, GoogleSheetsDocId] = None
- sheet_ids: Optional[Union[str, List[str]]] = empty_list()
+ sheet_ids: Optional[Union[str, list[str]]] = empty_list()

- def __post_init__(self, *_: List[str], **kwargs: Dict[str, Any]):
+ def __post_init__(self, *_: list[str], **kwargs: dict[str, Any]):
  if self._is_empty(self.id):
  self.MissingRequiredField("id")
  if not isinstance(self.id, GoogleSheetsDocId):
@@ -184,7 +179,7 @@ class GoogleSheetsDoc(YAMLRoot):

  @dataclass
  class GitHubAccount(YAMLRoot):
- _inherited_slots: ClassVar[List[str]] = []
+ _inherited_slots: ClassVar[list[str]] = []

  class_class_uri: ClassVar[URIRef] = WORKSPACE.GitHubAccount
  class_class_curie: ClassVar[str] = "workspace:GitHubAccount"
@@ -194,7 +189,7 @@ class GitHubAccount(YAMLRoot):
  username: Optional[str] = None
  password: Optional[str] = None

- def __post_init__(self, *_: List[str], **kwargs: Dict[str, Any]):
+ def __post_init__(self, *_: list[str], **kwargs: dict[str, Any]):
  if self.username is not None and not isinstance(self.username, str):
  self.username = str(self.username)

@@ -210,7 +205,7 @@ class Workspace(YAMLRoot):
  A workspace is a collection of projects managed locally on a file system
  """

- _inherited_slots: ClassVar[List[str]] = []
+ _inherited_slots: ClassVar[list[str]] = []

  class_class_uri: ClassVar[URIRef] = WORKSPACE.Workspace
  class_class_curie: ClassVar[str] = "workspace:Workspace"
@@ -219,15 +214,15 @@ class Workspace(YAMLRoot):

  projects: Optional[
  Union[
- Dict[Union[str, ProjectName], Union[dict, Project]],
- List[Union[dict, Project]],
+ dict[Union[str, ProjectName], Union[dict, Project]],
+ list[Union[dict, Project]],
  ]
  ] = empty_dict()
  github_account: Optional[Union[dict, GitHubAccount]] = None
  projects_directory: Optional[Union[str, FileSystemPath]] = None
  autosync: Optional[Union[bool, Bool]] = None

- def __post_init__(self, *_: List[str], **kwargs: Dict[str, Any]):
+ def __post_init__(self, *_: list[str], **kwargs: dict[str, Any]):
  self._normalize_inlined_as_list(slot_name="projects", slot_type=Project, key_name="name", keyed=True)

  if self.github_account is not None and not isinstance(self.github_account, GitHubAccount):
@@ -319,7 +314,7 @@ slots.project__data_files = Slot(
  curie=WORKSPACE.curie("data_files"),
  model_uri=WORKSPACE.project__data_files,
  domain=None,
- range=Optional[Union[Union[str, FileSystemPath], List[Union[str, FileSystemPath]]]],
+ range=Optional[Union[Union[str, FileSystemPath], list[Union[str, FileSystemPath]]]],
  )

  slots.project__source_google_sheet_docs = Slot(
@@ -330,8 +325,8 @@ slots.project__source_google_sheet_docs = Slot(
  domain=None,
  range=Optional[
  Union[
- Dict[Union[str, GoogleSheetsDocId], Union[dict, GoogleSheetsDoc]],
- List[Union[dict, GoogleSheetsDoc]],
+ dict[Union[str, GoogleSheetsDocId], Union[dict, GoogleSheetsDoc]],
+ list[Union[dict, GoogleSheetsDoc]],
  ]
  ],
  )
@@ -378,7 +373,7 @@ slots.googleSheetsDoc__sheet_ids = Slot(
  curie=WORKSPACE.curie("sheet_ids"),
  model_uri=WORKSPACE.googleSheetsDoc__sheet_ids,
  domain=None,
- range=Optional[Union[str, List[str]]],
+ range=Optional[Union[str, list[str]]],
  )

  slots.gitHubAccount__username = Slot(
@@ -407,8 +402,8 @@ slots.workspace__projects = Slot(
  domain=None,
  range=Optional[
  Union[
- Dict[Union[str, ProjectName], Union[dict, Project]],
- List[Union[dict, Project]],
+ dict[Union[str, ProjectName], Union[dict, Project]],
+ list[Union[dict, Project]],
  ]
  ],
  )
linkml/workspaces/example_runner.py CHANGED
@@ -5,11 +5,12 @@ import json
  import logging
  import os
  import sys
+ from collections.abc import Mapping
  from dataclasses import dataclass, field
  from io import StringIO
  from pathlib import Path
  from types import ModuleType
- from typing import Any, List, Mapping, Optional, TextIO, Union
+ from typing import Any, Optional, TextIO, Union

  import click
  import yaml
@@ -34,9 +35,9 @@ class SummaryDocument:

  text: StringIO = field(default_factory=lambda: StringIO())

- inputs: List[str] = field(default_factory=list)
+ inputs: list[str] = field(default_factory=list)

- outputs: List[str] = field(default_factory=list)
+ outputs: list[str] = field(default_factory=list)

  def add(self, *lines: str):
  for line in lines:
@@ -58,7 +59,7 @@ class ExampleRunner:
  input_directory: Optional[Path] = None
  """Directory in which positive instance examples are found."""

- input_formats: Optional[List[str]] = field(default_factory=lambda: ["yaml"])
+ input_formats: Optional[list[str]] = field(default_factory=lambda: ["yaml"])

  counter_example_input_directory: Optional[Path] = None
  """Directory in which negative instance examples are found. These are expected to fail."""
@@ -66,7 +67,7 @@ class ExampleRunner:
  output_directory: Optional[Path] = None
  """Directory where processed examples are written to."""

- output_formats: Optional[List[str]] = field(default_factory=lambda: ["yaml", "json", "ttl"])
+ output_formats: Optional[list[str]] = field(default_factory=lambda: ["yaml", "json", "ttl"])

  schemaview: Optional[SchemaView] = None
  """View over schema which all examples adhere to."""
@@ -139,7 +140,7 @@ class ExampleRunner:
  self.process_examples_from_list(input_examples, fmt, False)
  self.process_examples_from_list(input_counter_examples, fmt, True)

- def example_source_inputs(self, class_name: str = None) -> List[str]:
+ def example_source_inputs(self, class_name: str = None) -> list[str]:
  """
  Get the list of example source inputs.

{linkml-1.8.7.dist-info → linkml-1.9.1.dist-info}/METADATA CHANGED
@@ -1,12 +1,12 @@
  Metadata-Version: 2.1
  Name: linkml
- Version: 1.8.7
+ Version: 1.9.1
  Summary: Linked Open Data Modeling Language
  Home-page: https://linkml.io/linkml/
  Keywords: schema,linked data,data modeling,rdf,owl,biolink
  Author: Chris Mungall
  Author-email: cjmungall@lbl.gov
- Requires-Python: >=3.8.1,<4.0.0
+ Requires-Python: >=3.9.0,<4.0.0
  Classifier: Development Status :: 5 - Production/Stable
  Classifier: Environment :: Console
  Classifier: Intended Audience :: Developers
@@ -17,11 +17,9 @@ Classifier: Programming Language :: Python :: 3
  Classifier: Programming Language :: Python :: 3.9
  Classifier: Programming Language :: Python :: 3.10
  Classifier: Programming Language :: Python :: 3.11
- Classifier: Programming Language :: Python :: 3.10
- Classifier: Programming Language :: Python :: 3.11
  Classifier: Programming Language :: Python :: 3.12
+ Classifier: Programming Language :: Python :: 3.13
  Classifier: Programming Language :: Python :: 3.8
- Classifier: Programming Language :: Python :: 3.9
  Classifier: Topic :: Software Development :: Libraries :: Python Modules
  Provides-Extra: black
  Provides-Extra: numpydantic
@@ -34,11 +32,10 @@ Requires-Dist: graphviz (>=0.10.1)
  Requires-Dist: hbreader
  Requires-Dist: isodate (>=0.6.0)
  Requires-Dist: jinja2 (>=3.1.0)
- Requires-Dist: jsonasobj2 (>=1.0.3,<2.0.0)
+ Requires-Dist: jsonasobj2 (>=1.0.3,<2.dev0)
  Requires-Dist: jsonschema[format] (>=4.0.0)
- Requires-Dist: linkml-dataops
- Requires-Dist: linkml-runtime (>=1.8.1,<2.0.0)
- Requires-Dist: numpydantic (>=1.6.1) ; (python_version >= "3.9") and (extra == "numpydantic" or extra == "tests")
+ Requires-Dist: linkml-runtime (>=1.9.1,<2.0.0)
+ Requires-Dist: numpydantic (>=1.6.1) ; extra == "numpydantic" or extra == "tests"
  Requires-Dist: openpyxl
  Requires-Dist: parse
  Requires-Dist: prefixcommons (>=0.1.7)