linkml 1.5.5__py3-none-any.whl → 1.5.7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (81)
  1. linkml/__init__.py +2 -6
  2. linkml/_version.py +1 -1
  3. linkml/generators/PythonGenNotes.md +4 -4
  4. linkml/generators/__init__.py +26 -5
  5. linkml/generators/common/type_designators.py +27 -22
  6. linkml/generators/csvgen.py +4 -10
  7. linkml/generators/docgen/class.md.jinja2 +7 -0
  8. linkml/generators/docgen/class_diagram.md.jinja2 +0 -6
  9. linkml/generators/docgen/subset.md.jinja2 +54 -13
  10. linkml/generators/docgen.py +94 -92
  11. linkml/generators/dotgen.py +5 -9
  12. linkml/generators/erdiagramgen.py +58 -53
  13. linkml/generators/excelgen.py +10 -16
  14. linkml/generators/golanggen.py +11 -21
  15. linkml/generators/golrgen.py +4 -13
  16. linkml/generators/graphqlgen.py +3 -11
  17. linkml/generators/javagen.py +8 -15
  18. linkml/generators/jsonldcontextgen.py +7 -36
  19. linkml/generators/jsonldgen.py +14 -12
  20. linkml/generators/jsonschemagen.py +183 -136
  21. linkml/generators/linkmlgen.py +1 -1
  22. linkml/generators/markdowngen.py +40 -89
  23. linkml/generators/namespacegen.py +1 -2
  24. linkml/generators/oocodegen.py +22 -25
  25. linkml/generators/owlgen.py +48 -49
  26. linkml/generators/prefixmapgen.py +6 -14
  27. linkml/generators/projectgen.py +7 -14
  28. linkml/generators/protogen.py +3 -5
  29. linkml/generators/pydanticgen.py +85 -73
  30. linkml/generators/pythongen.py +89 -157
  31. linkml/generators/rdfgen.py +5 -11
  32. linkml/generators/shaclgen.py +32 -18
  33. linkml/generators/shexgen.py +19 -24
  34. linkml/generators/sparqlgen.py +5 -13
  35. linkml/generators/sqlalchemy/__init__.py +3 -2
  36. linkml/generators/sqlalchemy/sqlalchemy_declarative_template.py +7 -7
  37. linkml/generators/sqlalchemy/sqlalchemy_imperative_template.py +3 -3
  38. linkml/generators/sqlalchemygen.py +29 -27
  39. linkml/generators/sqlddlgen.py +34 -26
  40. linkml/generators/sqltablegen.py +21 -21
  41. linkml/generators/sssomgen.py +11 -13
  42. linkml/generators/summarygen.py +2 -4
  43. linkml/generators/terminusdbgen.py +7 -19
  44. linkml/generators/typescriptgen.py +10 -18
  45. linkml/generators/yamlgen.py +0 -2
  46. linkml/generators/yumlgen.py +23 -71
  47. linkml/linter/cli.py +4 -11
  48. linkml/linter/config/datamodel/config.py +17 -47
  49. linkml/linter/linter.py +2 -4
  50. linkml/linter/rules.py +34 -48
  51. linkml/reporting/__init__.py +2 -0
  52. linkml/reporting/model.py +9 -24
  53. linkml/transformers/relmodel_transformer.py +20 -33
  54. linkml/transformers/schema_renamer.py +14 -10
  55. linkml/utils/converter.py +15 -15
  56. linkml/utils/datautils.py +9 -24
  57. linkml/utils/datavalidator.py +2 -2
  58. linkml/utils/execute_tutorial.py +10 -12
  59. linkml/utils/generator.py +74 -92
  60. linkml/utils/helpers.py +4 -2
  61. linkml/utils/ifabsent_functions.py +23 -15
  62. linkml/utils/mergeutils.py +19 -35
  63. linkml/utils/rawloader.py +2 -6
  64. linkml/utils/schema_builder.py +31 -19
  65. linkml/utils/schema_fixer.py +28 -18
  66. linkml/utils/schemaloader.py +44 -89
  67. linkml/utils/schemasynopsis.py +50 -73
  68. linkml/utils/sqlutils.py +40 -30
  69. linkml/utils/typereferences.py +9 -6
  70. linkml/utils/validation.py +4 -5
  71. linkml/validators/__init__.py +2 -0
  72. linkml/validators/jsonschemavalidator.py +104 -53
  73. linkml/validators/sparqlvalidator.py +5 -15
  74. linkml/workspaces/datamodel/workspaces.py +13 -30
  75. linkml/workspaces/example_runner.py +75 -68
  76. {linkml-1.5.5.dist-info → linkml-1.5.7.dist-info}/METADATA +2 -2
  77. linkml-1.5.7.dist-info/RECORD +109 -0
  78. linkml-1.5.5.dist-info/RECORD +0 -109
  79. {linkml-1.5.5.dist-info → linkml-1.5.7.dist-info}/LICENSE +0 -0
  80. {linkml-1.5.5.dist-info → linkml-1.5.7.dist-info}/WHEEL +0 -0
  81. {linkml-1.5.5.dist-info → linkml-1.5.7.dist-info}/entry_points.txt +0 -0
linkml/utils/sqlutils.py CHANGED
@@ -11,14 +11,14 @@ import click
 import linkml_runtime.linkml_model.meta as metamodel
 from linkml_runtime import SchemaView
 from linkml_runtime.dumpers import yaml_dumper
-from linkml_runtime.linkml_model import PermissibleValue, SchemaDefinition
+from linkml_runtime.linkml_model import SchemaDefinition
 from linkml_runtime.utils.compile_python import compile_python
 from linkml_runtime.utils.enumerations import EnumDefinitionImpl
 from linkml_runtime.utils.formatutils import underscore
 from linkml_runtime.utils.introspection import package_schemaview
 from linkml_runtime.utils.yamlutils import YAMLRoot
 from pydantic import BaseModel
-from sqlalchemy import create_engine
+from sqlalchemy import StaticPool, create_engine
 from sqlalchemy.engine import Engine
 from sqlalchemy.ext.associationproxy import _AssociationCollection
 from sqlalchemy.orm import sessionmaker
@@ -28,8 +28,16 @@ from linkml.generators.pythongen import PythonGenerator
 from linkml.generators.sqlalchemygen import SQLAlchemyGenerator, TemplateEnum
 from linkml.generators.sqltablegen import SQLTableGenerator
 from linkml.utils import datautils, validation
-from linkml.utils.datautils import (_get_context, _get_format, _is_xsv, dumpers_loaders,
-                                    get_dumper, get_loader, infer_root_class, infer_index_slot)
+from linkml.utils.datautils import (
+    _get_context,
+    _get_format,
+    _is_xsv,
+    dumpers_loaders,
+    get_dumper,
+    get_loader,
+    infer_index_slot,
+    infer_root_class,
+)


 @dataclass
@@ -53,11 +61,14 @@ class SQLStore:
     schemaview: SchemaView = None
     engine: Engine = None
     database_path: str = None
+    use_memory: bool = False
+    """https://docs.sqlalchemy.org/en/20/dialects/sqlite.html#using-a-memory-database-in-multiple-threads"""
+
     module: ModuleType = None
     native_module: ModuleType = None
     include_schema_in_database: bool = None

-    def db_exists(self, create=True, force=False) -> str:
+    def db_exists(self, create=True, force=False) -> Optional[str]:
         """
         check if database exists, optionally create if not present

@@ -65,13 +76,21 @@ class SQLStore:
         :param force: recreate database, destroying any content if previously present
         :return: path
         """
-        if not self.database_path:
-            raise ValueError("database_path not set")
-        db_exists = os.path.exists(self.database_path)
+        if self.use_memory:
+            db_exists = False
+        else:
+            if not self.database_path:
+                raise ValueError("database_path not set")
+            db_exists = os.path.exists(self.database_path)
         if force or (create and not db_exists):
-            if force:
-                Path(self.database_path).unlink(missing_ok=True)
-            self.engine = create_engine(f"sqlite:///{self.database_path}")
+            if self.use_memory:
+                self.engine = create_engine(
+                    "sqlite://", connect_args={"check_same_thread": False}, poolclass=StaticPool
+                )
+            else:
+                if force:
+                    Path(self.database_path).unlink(missing_ok=True)
+                self.engine = create_engine(f"sqlite:///{self.database_path}")
             with self.engine.connect() as con:
                 ddl = SQLTableGenerator(self.schema).generate_ddl()
                 con.connection.executescript(ddl)
@@ -79,6 +98,8 @@ class SQLStore:
                 metamodel_sv = package_schemaview(metamodel.__name__)
                 meta_ddl = SQLTableGenerator(metamodel_sv.schema).generate_ddl()
                 con.connection.executescript(meta_ddl)
+        if self.use_memory:
+            return None
         if not os.path.exists(self.database_path):
             raise ValueError(f"No database: {self.database_path}")
         return self.database_path
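
The use_memory branch above follows SQLAlchemy's documented recipe for sharing one in-memory SQLite database across threads: StaticPool pins a single connection, and check_same_thread=False lets other threads reuse it. A minimal standalone sketch of that recipe (the DDL here is illustrative):

    from sqlalchemy import StaticPool, create_engine, text

    # One shared connection: every thread sees the same transient database.
    engine = create_engine(
        "sqlite://",  # no file path means an in-memory database
        connect_args={"check_same_thread": False},
        poolclass=StaticPool,
    )
    with engine.connect() as con:
        con.execute(text("CREATE TABLE person (id TEXT PRIMARY KEY)"))  # illustrative DDL
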
@@ -118,9 +139,7 @@ class SQLStore:
         """
         return self.load_all(target_class=target_class)[0]

-    def load_all(
-        self, target_class: Union[str, Type[YAMLRoot]] = None
-    ) -> List[YAMLRoot]:
+    def load_all(self, target_class: Union[str, Type[YAMLRoot]] = None) -> List[YAMLRoot]:
         if target_class is None:
             target_class_name = infer_root_class(self.schemaview)
             target_class = self.native_module.__dict__[target_class_name]
@@ -132,7 +151,7 @@ class SQLStore:
         q = session.query(typ)
         all_objs = q.all()
         tmp = self.from_sqla(all_objs)
-        return tmp
+        return tmp

     def dump(self, element: YAMLRoot, append=True) -> None:
         """
@@ -225,10 +244,6 @@ class SQLStore:
             cls = nm[typ.__name__]
         else:
             cls = None
-        try:
-            kvs = vars(obj).items()
-        except TypeError:
-            kvs = None
         if isinstance(obj, list) or isinstance(obj, _AssociationCollection):
             nu_obj = [self.from_sqla(x) for x in obj]
             if nu_obj:
@@ -236,7 +251,6 @@ class SQLStore:
             else:
                 return None
         elif cls:
-            nu_cls = self.from_sqla_type(typ)
             inst_args = {}
             for sn in self.schemaview.class_slots(cls.name):
                 sn = underscore(sn)
@@ -295,9 +309,7 @@ def main(verbose: int, quiet: bool, csv_field_size_limit: int):
     "-C",
     help="name of class in datamodel that the root node instantiates",
 )
-@click.option(
-    "--index-slot", "-S", help="top level slot. Required for CSV dumping/loading"
-)
+@click.option("--index-slot", "-S", help="top level slot. Required for CSV dumping/loading")
 @click.option("--schema", "-s", help="Path to schema specified as LinkML yaml")
 @click.option(
     "--validate/--no-validate",
@@ -341,10 +353,10 @@ def dump(
     sv = SchemaView(schema)
     if target_class is None:
         if sv is None:
-            raise ValueError(f"Must specify schema if not target class is specified")
+            raise ValueError("Must specify schema if not target class is specified")
         target_class = infer_root_class(sv)
     if target_class is None:
-        raise Exception(f"target class not specified and could not be inferred")
+        raise Exception("target class not specified and could not be inferred")
     py_target_class = python_module.__dict__[target_class]
     input_format = _get_format(input, input_format)
     loader = get_loader(input_format)
@@ -352,7 +364,7 @@ def dump(
     inargs = {}
     if datautils._is_rdf_format(input_format):
         if sv is None:
-            raise Exception(f"Must pass schema arg")
+            raise Exception("Must pass schema arg")
         inargs["schemaview"] = sv
         inargs["fmt"] = input_format
     if _is_xsv(input_format):
@@ -393,9 +405,7 @@ def dump(
     "-C",
     help="name of class in datamodel that the root node instantiates",
 )
-@click.option(
-    "--index-slot", "-S", help="top level slot. Required for CSV dumping/loading"
-)
+@click.option("--index-slot", "-S", help="top level slot. Required for CSV dumping/loading")
 @click.option("--schema", "-s", help="Path to schema specified as LinkML yaml")
 @click.option(
     "--validate/--no-validate",
@@ -458,7 +468,7 @@ def load(
         outargs["fmt"] = "json-ld"
     if output_format == "rdf" or output_format == "ttl":
         if sv is None:
-            raise Exception(f"Must pass schema arg")
+            raise Exception("Must pass schema arg")
         outargs["schemaview"] = sv
     if _is_xsv(output_format):
         if index_slot is None:
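
Taken together, the sqlutils changes let a caller opt into a throwaway in-memory store instead of a file on disk. A hypothetical usage sketch; the schema path is illustrative, and the positional schema argument is assumed from the surrounding class:

    from linkml.utils.sqlutils import SQLStore

    store = SQLStore("personinfo.yaml", use_memory=True)  # hypothetical schema path
    store.db_exists(create=True)  # creates tables; per the diff, returns None when in memory
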
linkml/utils/typereferences.py CHANGED
@@ -1,11 +1,14 @@
 from dataclasses import dataclass
-from typing import Set, cast
+from typing import Set

-from linkml_runtime.linkml_model.meta import (ClassDefinitionName, ElementName,
-                                              EnumDefinitionName,
-                                              SlotDefinitionName,
-                                              SubsetDefinitionName,
-                                              TypeDefinitionName)
+from linkml_runtime.linkml_model.meta import (
+    ClassDefinitionName,
+    ElementName,
+    EnumDefinitionName,
+    SlotDefinitionName,
+    SubsetDefinitionName,
+    TypeDefinitionName,
+)
 from linkml_runtime.utils.metamodelcore import empty_set

linkml/utils/validation.py CHANGED
@@ -1,13 +1,10 @@
 import json
-import logging
-import sys
 from typing import TextIO, Type, Union

 import jsonschema
 from linkml_runtime.dumpers import json_dumper
 from linkml_runtime.linkml_model import SchemaDefinition
-from linkml_runtime.utils.schemaview import SchemaView
-from linkml_runtime.utils.yamlutils import YAMLRoot, as_dict
+from linkml_runtime.utils.yamlutils import YAMLRoot

 from linkml.generators.jsonschemagen import JsonSchemaGenerator

@@ -46,4 +43,6 @@ def validate_object(
         not_closed=not_closed,
     ).serialize(not_closed=not_closed)
     jsonschema_obj = json.loads(jsonschemastr)
-    return jsonschema.validate(inst_dict, schema=jsonschema_obj, format_checker=jsonschema.Draft7Validator.FORMAT_CHECKER)
+    return jsonschema.validate(
+        inst_dict, schema=jsonschema_obj, format_checker=jsonschema.Draft7Validator.FORMAT_CHECKER
+    )
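
The re-wrapped call is behavior-preserving; the FORMAT_CHECKER argument is what turns JSON Schema "format" annotations into enforced assertions. A small self-contained illustration (schema and values are made up):

    import jsonschema

    schema = {"type": "string", "format": "date"}
    checker = jsonschema.Draft7Validator.FORMAT_CHECKER

    jsonschema.validate("2023-01-01", schema, format_checker=checker)  # passes
    try:
        jsonschema.validate("not-a-date", schema, format_checker=checker)
    except jsonschema.ValidationError as err:
        print(err.message)  # 'not-a-date' is not a 'date'
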
linkml/validators/__init__.py CHANGED
@@ -1,2 +1,4 @@
 from linkml.validators.jsonschemavalidator import JsonSchemaDataValidator
 from linkml.validators.sparqlvalidator import SparqlDataValidator
+
+__all__ = ["JsonSchemaDataValidator", "SparqlDataValidator"]
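
With __all__ declared, wildcard imports and API-surface linters see exactly these two names:

    from linkml.validators import *  # brings in JsonSchemaDataValidator, SparqlDataValidator
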
linkml/validators/jsonschemavalidator.py CHANGED
@@ -1,13 +1,13 @@
-import json
 import logging
 import sys
-from dataclasses import dataclass
-from typing import TextIO, Type, Union, Dict, Optional
+from dataclasses import asdict, dataclass, field
+from functools import lru_cache
+from typing import Any, Iterable, List, Type, Union

 import click
 import jsonschema
-from linkml_runtime.dumpers import json_dumper
-from linkml_runtime.linkml_model import SchemaDefinition, ClassDefinitionName
+from jsonschema.exceptions import best_match
+from linkml_runtime.linkml_model import ClassDefinitionName, SchemaDefinition
 from linkml_runtime.utils.compile_python import compile_python
 from linkml_runtime.utils.dictutils import as_simple_dict
 from linkml_runtime.utils.schemaview import SchemaView
@@ -20,17 +20,50 @@ from linkml.utils import datautils
 from linkml.utils.datavalidator import DataValidator


+class HashableSchemaDefinition(SchemaDefinition):
+    def __hash__(self) -> int:
+        return hash(self.id)
+
+
+@lru_cache(maxsize=None)
+def _generate_jsonschema(schema, top_class, closed, include_range_class_descendants):
+    logging.debug("Generating JSON Schema")
+    not_closed = not closed
+    return JsonSchemaGenerator(
+        schema=schema,
+        mergeimports=True,
+        top_class=top_class,
+        not_closed=not_closed,
+        include_range_class_descendants=include_range_class_descendants,
+    ).generate()
+
+
+class JsonSchemaDataValidatorError(Exception):
+    def __init__(self, validation_messages: List[str]) -> None:
+        super().__init__("\n".join(validation_messages))
+        self.validation_messages = validation_messages
+
+
 @dataclass
 class JsonSchemaDataValidator(DataValidator):
     """
     Implementation of DataValidator that wraps jsonschema validation
     """

-    jsonschema_objs: Optional[Dict[str, Dict]] = None
-    """Cached outputs of jsonschema generation"""
+    include_range_class_descendants: bool = False
+    _hashable_schema: Union[str, HashableSchemaDefinition] = field(init=False, repr=False)
+
+    def __setattr__(self, __name: str, __value: Any) -> None:
+        if __name == "schema":
+            if isinstance(__value, SchemaDefinition):
+                self._hashable_schema = HashableSchemaDefinition(**asdict(__value))
+            else:
+                self._hashable_schema = __value
+        return super().__setattr__(__name, __value)

     def validate_file(self, input: str, format: str = "json", **kwargs):
-        return self.validate_object(obj)
+        # return self.validate_object(obj)
+        pass

     def validate_object(
         self, data: YAMLRoot, target_class: Type[YAMLRoot] = None, closed: bool = True
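
HashableSchemaDefinition exists because the new @lru_cache-decorated _generate_jsonschema keys its cache on its arguments, which must be hashable, while eq-comparing dataclasses such as SchemaDefinition have __hash__ set to None. A minimal analogue of the trick, with illustrative names rather than linkml's:

    from dataclasses import dataclass
    from functools import lru_cache

    @dataclass
    class Schema:  # like SchemaDefinition: a dataclass with eq=True, so __hash__ is None
        id: str

    class HashableSchema(Schema):  # plain subclass restores hashability, keyed on id
        def __hash__(self) -> int:
            return hash(self.id)

    @lru_cache(maxsize=None)
    def generate(schema: Schema) -> str:
        return f"json schema for {schema.id}"  # stands in for the expensive generation step

    generate(HashableSchema(id="s1"))  # computed
    generate(HashableSchema(id="s1"))  # cache hit: equal hash and ==
    # generate(Schema(id="s1")) would raise TypeError: unhashable type: 'Schema'
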
@@ -46,26 +79,7 @@ class JsonSchemaDataValidator(DataValidator):
         if target_class is None:
             target_class = type(data)
         inst_dict = as_simple_dict(data)
-        not_closed = not closed
-        if self.schema is None:
-            raise ValueError(f"schema object must be set")
-        if self.jsonschema_objs is None:
-            self.jsonschema_objs = {}
-        schema_id = self.schema.id if isinstance(self.schema, SchemaDefinition) else self.schema
-        cache_params = frozenset([schema_id, target_class.class_name])
-        if cache_params not in self.jsonschema_objs:
-            jsonschemastr = JsonSchemaGenerator(
-                self.schema,
-                mergeimports=True,
-                top_class=target_class.class_name,
-                not_closed=not_closed,
-            ).serialize(not_closed=not_closed)
-            jsonschema_obj = json.loads(jsonschemastr)
-            self.jsonschema_objs[cache_params] = jsonschema_obj
-        else:
-            logging.info(f"Using cached jsonschema for {schema_id}")
-            jsonschema_obj = self.jsonschema_objs[cache_params]
-        return jsonschema.validate(inst_dict, schema=jsonschema_obj, format_checker=jsonschema.Draft7Validator.FORMAT_CHECKER)
+        self.validate_dict(inst_dict, target_class.class_name, closed)

     def validate_dict(
         self, data: dict, target_class: ClassDefinitionName = None, closed: bool = True
@@ -78,27 +92,35 @@ class JsonSchemaDataValidator(DataValidator):
         :param closed:
         :return:
         """
-        not_closed = not closed
+        results = list(self.iter_validate_dict(data, target_class, closed))
+        if results:
+            raise JsonSchemaDataValidatorError(results)
+
+    def iter_validate_dict(
+        self, data: dict, target_class_name: ClassDefinitionName = None, closed: bool = True
+    ) -> Iterable[str]:
         if self.schema is None:
-            raise ValueError(f"schema object must be set")
-        if target_class is None:
+            raise ValueError("schema object must be set")
+        if target_class_name is None:
             roots = [c.name for c in self.schema.classes.values() if c.tree_root]
             if len(roots) != 1:
                 raise ValueError(f"Cannot determine tree root: {roots}")
-            target_class = roots[0]
-        jsonschemastr = JsonSchemaGenerator(
-            self.schema,
-            mergeimports=True,
-            top_class=target_class,
-            not_closed=not_closed,
-        ).serialize(not_closed=not_closed)
-        jsonschema_obj = json.loads(jsonschemastr)
-        return jsonschema.validate(data, schema=jsonschema_obj, format_checker=jsonschema.Draft7Validator.FORMAT_CHECKER)
+            target_class_name = roots[0]
+        jsonschema_obj = _generate_jsonschema(
+            self._hashable_schema, target_class_name, closed, self.include_range_class_descendants
+        )
+        validator = jsonschema.Draft7Validator(
+            jsonschema_obj, format_checker=jsonschema.Draft7Validator.FORMAT_CHECKER
+        )
+        for error in validator.iter_errors(data):
+            best_error = best_match([error])
+            # TODO: This should return some kind of standard validation result
+            # object, but until that is defined just yield string messages
+            yield f"{best_error.message} in {best_error.json_path}"


 @click.command()
 @click.option("--module", "-m", help="Path to python datamodel module")
-@click.option("--output", "-o", help="Path to output file")
 @click.option(
     "--input-format",
     "-f",
@@ -110,20 +132,32 @@ class JsonSchemaDataValidator(DataValidator):
     "-C",
     help="name of class in datamodel that the root node instantiates",
 )
+@click.option("--index-slot", "-S", help="top level slot. Required for CSV dumping/loading")
+@click.option("--schema", "-s", help="Path to schema specified as LinkML yaml")
 @click.option(
-    "--index-slot", "-S", help="top level slot. Required for CSV dumping/loading"
+    "--exit-on-first-failure/--no-exit-on-first-failure",
+    default=False,
+    help="Exit after the first validation failure is found. If not specified all validation failures are reported.",
+)
+@click.option(
+    "--include-range-class-descendants/--no-range-class-descendants",
+    default=False,
+    show_default=False,
+    help="""
+When handling range constraints, include all descendants of the range class instead of just the range class
+""",
 )
-@click.option("--schema", "-s", help="Path to schema specified as LinkML yaml")
 @click.argument("input")
 @click.version_option(__version__, "-V", "--version")
 def cli(
     input,
     module,
     target_class,
-    output=None,
     input_format=None,
     schema=None,
     index_slot=None,
+    exit_on_first_failure=False,
+    include_range_class_descendants=False,
 ) -> None:
     """
     Validates instance data
@@ -140,13 +174,12 @@ def cli(
     if target_class is None:
         target_class = datautils.infer_root_class(sv)
     if target_class is None:
-        raise Exception(f"target class not specified and could not be inferred")
+        raise Exception("target class not specified and could not be inferred")
     py_target_class = python_module.__dict__[target_class]
     input_format = datautils._get_format(input, input_format)
     loader = datautils.get_loader(input_format)

     inargs = {}
-    outargs = {}
     if datautils._is_xsv(input_format):
         if index_slot is None:
             index_slot = datautils.infer_index_slot(sv, target_class)
@@ -157,15 +190,33 @@ def cli(
     if datautils._is_rdf_format(input_format):
         inargs["schemaview"] = sv
         inargs["fmt"] = input_format
-    obj = loader.load(source=input, target_class=py_target_class, **inargs)
+
+    try:
+        data_as_dict = loader.load_as_dict(source=input, **inargs)
+    except NotImplementedError:
+        obj = loader.load(source=input, target_class=py_target_class, **inargs)
+        data_as_dict = as_simple_dict(obj)
+
     # Validation
     if schema is None:
-        raise Exception(
-            "--schema must be passed in order to validate. Suppress with --no-validate"
-        )
-    validator = JsonSchemaDataValidator(schema)
-    results = validator.validate_object(obj, target_class=py_target_class)
-    print(results)
+        raise Exception("--schema must be passed in order to validate. Suppress with --no-validate")
+
+    validator = JsonSchemaDataValidator(
+        schema, include_range_class_descendants=include_range_class_descendants
+    )
+    error_count = 0
+    for error in validator.iter_validate_dict(
+        data_as_dict, target_class_name=py_target_class.class_name
+    ):
+        error_count += 1
+        click.echo(click.style("\u2717 ", fg="red") + error)
+        if exit_on_first_failure:
+            sys.exit(1)
+
+    if not error_count:
+        click.echo(click.style("\u2713 ", fg="green") + "No problems found")
+
+    sys.exit(0 if error_count == 0 else 1)


 if __name__ == "__main__":
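
A hypothetical caller of the new streaming API; the schema path, class name, and data are illustrative:

    from linkml.validators import JsonSchemaDataValidator

    validator = JsonSchemaDataValidator(
        "personinfo.yaml",  # illustrative schema path
        include_range_class_descendants=True,
    )
    for message in validator.iter_validate_dict({"age": "forty"}, target_class_name="Person"):
        print(message)  # e.g. "'forty' is not of type 'integer' in $.age"

Yielding messages lazily is what lets the CLI above choose between fail-fast (--exit-on-first-failure) and full reporting, while still exiting non-zero whenever any failure was seen.
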
linkml/validators/sparqlvalidator.py CHANGED
@@ -7,15 +7,12 @@ import click
 from linkml_runtime.linkml_model import SchemaDefinition
 from linkml_runtime.utils.schemaview import SchemaView
 from rdflib import Graph
-from SPARQLWrapper import (JSON, N3, RDF, RDFXML, TURTLE, SPARQLWrapper,
-                           SPARQLWrapper2)
+from SPARQLWrapper import JSON, SPARQLWrapper

 from linkml._version import __version__
 from linkml.generators.sparqlgen import SparqlGenerator
-from linkml.generators.yamlgen import YAMLGenerator
 from linkml.reporting import CheckResult, Report
-from linkml.utils.datautils import (_get_format, dumpers_loaders, get_dumper,
-                                    get_loader)
+from linkml.utils.datautils import _get_format, dumpers_loaders, get_dumper
 from linkml.utils.datavalidator import DataValidator


@@ -34,7 +31,6 @@ def _make_result(row):

 @dataclass
 class SparqlDataValidator(DataValidator):
-
     schema: SchemaDefinition = None
     queries: dict = None

@@ -90,9 +86,7 @@ class SparqlDataValidator(DataValidator):


 @click.command()
-@click.option(
-    "--named-graph", "-G", multiple=True, help="Constrain query to a named graph"
-)
+@click.option("--named-graph", "-G", multiple=True, help="Constrain query to a named graph")
 @click.option("--input", "-i", help="Input file to validate")
 @click.option("--endpoint-url", "-U", help="URL of sparql endpoint")
 @click.option("--limit", "-L", help="Max results per query")
@@ -128,16 +122,12 @@ def cli(

     linkml-sparql-validate -U http://sparql.hegroup.org/sparql -s tests/test_validation/input/omo.yaml
     """
-    if schema is not None:
-        sv = SchemaView(schema)
     validator = SparqlDataValidator(schema)
     if endpoint_url is not None:
-        results = validator.validate_endpoint(
-            endpoint_url, limit=limit, named_graphs=named_graph
-        )
+        results = validator.validate_endpoint(endpoint_url, limit=limit, named_graphs=named_graph)
     else:
         if input is None:
-            raise Exception(f"Must pass one of --endpoint-url OR --input")
+            raise Exception("Must pass one of --endpoint-url OR --input")
         input_format = _get_format(input, input_format)
         results = validator.validate_file(input, format=input_format)
     output_format = _get_format(output, output_format, default="json")
linkml/workspaces/datamodel/workspaces.py CHANGED
@@ -8,26 +8,17 @@
 # license: https://creativecommons.org/publicdomain/zero/1.0/

 import dataclasses
-import re
-import sys
 from dataclasses import dataclass
 from typing import Any, ClassVar, Dict, List, Optional, Union

-from jsonasobj2 import JsonObj, as_dict
-from linkml_runtime.linkml_model.meta import (EnumDefinition, PermissibleValue,
-                                              PvFormulaOptions)
-from linkml_runtime.linkml_model.types import Boolean, Datetime, String
+from jsonasobj2 import as_dict
+from linkml_runtime.linkml_model.types import String
 from linkml_runtime.utils.curienamespace import CurieNamespace
-from linkml_runtime.utils.dataclass_extensions_376 import \
-    dataclasses_init_fn_with_kwargs
-from linkml_runtime.utils.enumerations import EnumDefinitionImpl
-from linkml_runtime.utils.formatutils import camelcase, sfx, underscore
-from linkml_runtime.utils.metamodelcore import (Bool, XSDDateTime, bnode,
-                                                empty_dict, empty_list)
+from linkml_runtime.utils.dataclass_extensions_376 import dataclasses_init_fn_with_kwargs
+from linkml_runtime.utils.metamodelcore import Bool, XSDDateTime, empty_dict, empty_list
 from linkml_runtime.utils.slot import Slot
-from linkml_runtime.utils.yamlutils import (YAMLRoot, extended_float,
-                                            extended_int, extended_str)
-from rdflib import Namespace, URIRef
+from linkml_runtime.utils.yamlutils import YAMLRoot, extended_str
+from rdflib import URIRef

 metamodel_version = "1.7.0"
 version = None
@@ -41,9 +32,7 @@ DCAT = CurieNamespace("dcat", "http://www.w3.org/ns/dcat#")
 FORMATS = CurieNamespace("formats", "http://www.w3.org/ns/formats/")
 FRICTIONLESS = CurieNamespace("frictionless", "https://specs.frictionlessdata.io/")
 LINKML = CurieNamespace("linkml", "https://w3id.org/linkml/")
-MEDIATYPES = CurieNamespace(
-    "mediatypes", "https://www.iana.org/assignments/media-types/"
-)
+MEDIATYPES = CurieNamespace("mediatypes", "https://www.iana.org/assignments/media-types/")
 OWL = CurieNamespace("owl", "http://www.w3.org/2002/07/owl#")
 PAV = CurieNamespace("pav", "http://purl.org/pav/")
 RDF = CurieNamespace("rdf", "http://www.w3.org/1999/02/22-rdf-syntax-ns#")
@@ -68,7 +57,8 @@ class FileSystemPath(String):


 class ProjectName(String):
-    """A project name MUST contain no whitespace and SHOULD only contains alphanumeric characters and hyphens (no underscores)"""
+    """A project name MUST contain no whitespace and SHOULD only contains
+    alphanumeric characters and hyphens (no underscores)"""

     type_class_uri = XSD.string
     type_class_curie = "xsd:string"
@@ -133,13 +123,9 @@ class Project(YAMLRoot):
         if self.github_organization is not None and not isinstance(
             self.github_organization, GitHubAccount
         ):
-            self.github_organization = GitHubAccount(
-                **as_dict(self.github_organization)
-            )
+            self.github_organization = GitHubAccount(**as_dict(self.github_organization))

-        if self.creation_date is not None and not isinstance(
-            self.creation_date, XSDDateTime
-        ):
+        if self.creation_date is not None and not isinstance(self.creation_date, XSDDateTime):
             self.creation_date = XSDDateTime(self.creation_date)

         if self.description is not None and not isinstance(self.description, str):
@@ -153,8 +139,7 @@ class Project(YAMLRoot):
         if not isinstance(self.data_files, list):
             self.data_files = [self.data_files] if self.data_files is not None else []
         self.data_files = [
-            v if isinstance(v, FileSystemPath) else FileSystemPath(v)
-            for v in self.data_files
+            v if isinstance(v, FileSystemPath) else FileSystemPath(v) for v in self.data_files
         ]

         self._normalize_inlined_as_dict(
@@ -259,9 +244,7 @@ class Workspace(YAMLRoot):
             slot_name="projects", slot_type=Project, key_name="name", keyed=True
         )

-        if self.github_account is not None and not isinstance(
-            self.github_account, GitHubAccount
-        ):
+        if self.github_account is not None and not isinstance(self.github_account, GitHubAccount):
             self.github_account = GitHubAccount(**as_dict(self.github_account))

         if self.projects_directory is not None and not isinstance(