cognite-neat 0.98.0__py3-none-any.whl → 0.99.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of cognite-neat has been flagged as potentially problematic.
- cognite/neat/_client/__init__.py +4 -0
- cognite/neat/_client/_api/data_modeling_loaders.py +585 -0
- cognite/neat/_client/_api/schema.py +111 -0
- cognite/neat/_client/_api_client.py +17 -0
- cognite/neat/_client/data_classes/__init__.py +0 -0
- cognite/neat/{_utils/cdf/data_classes.py → _client/data_classes/data_modeling.py} +8 -135
- cognite/neat/_client/data_classes/schema.py +495 -0
- cognite/neat/_constants.py +27 -4
- cognite/neat/_graph/_shared.py +14 -15
- cognite/neat/_graph/extractors/_classic_cdf/_assets.py +14 -154
- cognite/neat/_graph/extractors/_classic_cdf/_base.py +154 -7
- cognite/neat/_graph/extractors/_classic_cdf/_classic.py +25 -14
- cognite/neat/_graph/extractors/_classic_cdf/_data_sets.py +17 -92
- cognite/neat/_graph/extractors/_classic_cdf/_events.py +13 -162
- cognite/neat/_graph/extractors/_classic_cdf/_files.py +15 -179
- cognite/neat/_graph/extractors/_classic_cdf/_labels.py +32 -100
- cognite/neat/_graph/extractors/_classic_cdf/_relationships.py +27 -178
- cognite/neat/_graph/extractors/_classic_cdf/_sequences.py +14 -139
- cognite/neat/_graph/extractors/_classic_cdf/_timeseries.py +15 -173
- cognite/neat/_graph/extractors/_rdf_file.py +6 -7
- cognite/neat/_graph/loaders/_rdf2dms.py +2 -2
- cognite/neat/_graph/queries/_base.py +17 -1
- cognite/neat/_graph/transformers/_classic_cdf.py +74 -147
- cognite/neat/_graph/transformers/_prune_graph.py +1 -1
- cognite/neat/_graph/transformers/_rdfpath.py +1 -1
- cognite/neat/_issues/_base.py +26 -17
- cognite/neat/_issues/errors/__init__.py +4 -2
- cognite/neat/_issues/errors/_external.py +7 -0
- cognite/neat/_issues/errors/_properties.py +2 -7
- cognite/neat/_issues/errors/_resources.py +1 -1
- cognite/neat/_issues/warnings/__init__.py +8 -0
- cognite/neat/_issues/warnings/_external.py +16 -0
- cognite/neat/_issues/warnings/_properties.py +16 -0
- cognite/neat/_issues/warnings/_resources.py +26 -2
- cognite/neat/_issues/warnings/user_modeling.py +4 -4
- cognite/neat/_rules/_constants.py +8 -11
- cognite/neat/_rules/analysis/_base.py +8 -4
- cognite/neat/_rules/exporters/_base.py +3 -4
- cognite/neat/_rules/exporters/_rules2dms.py +33 -46
- cognite/neat/_rules/importers/__init__.py +1 -3
- cognite/neat/_rules/importers/_base.py +1 -1
- cognite/neat/_rules/importers/_dms2rules.py +6 -29
- cognite/neat/_rules/importers/_rdf/__init__.py +5 -0
- cognite/neat/_rules/importers/_rdf/_base.py +34 -11
- cognite/neat/_rules/importers/_rdf/_imf2rules.py +91 -0
- cognite/neat/_rules/importers/_rdf/_inference2rules.py +43 -35
- cognite/neat/_rules/importers/_rdf/_owl2rules.py +80 -0
- cognite/neat/_rules/importers/_rdf/_shared.py +138 -441
- cognite/neat/_rules/models/__init__.py +1 -1
- cognite/neat/_rules/models/_base_rules.py +22 -12
- cognite/neat/_rules/models/dms/__init__.py +4 -2
- cognite/neat/_rules/models/dms/_exporter.py +45 -48
- cognite/neat/_rules/models/dms/_rules.py +20 -17
- cognite/neat/_rules/models/dms/_rules_input.py +52 -8
- cognite/neat/_rules/models/dms/_validation.py +391 -119
- cognite/neat/_rules/models/entities/_single_value.py +32 -4
- cognite/neat/_rules/models/information/__init__.py +2 -0
- cognite/neat/_rules/models/information/_rules.py +0 -67
- cognite/neat/_rules/models/information/_validation.py +9 -9
- cognite/neat/_rules/models/mapping/__init__.py +2 -3
- cognite/neat/_rules/models/mapping/_classic2core.py +36 -146
- cognite/neat/_rules/models/mapping/_classic2core.yaml +343 -0
- cognite/neat/_rules/transformers/__init__.py +2 -2
- cognite/neat/_rules/transformers/_converters.py +110 -11
- cognite/neat/_rules/transformers/_mapping.py +105 -30
- cognite/neat/_rules/transformers/_pipelines.py +1 -1
- cognite/neat/_rules/transformers/_verification.py +31 -3
- cognite/neat/_session/_base.py +24 -8
- cognite/neat/_session/_drop.py +35 -0
- cognite/neat/_session/_inspect.py +17 -5
- cognite/neat/_session/_mapping.py +39 -0
- cognite/neat/_session/_prepare.py +219 -23
- cognite/neat/_session/_read.py +49 -12
- cognite/neat/_session/_to.py +8 -5
- cognite/neat/_session/exceptions.py +4 -0
- cognite/neat/_store/_base.py +27 -24
- cognite/neat/_utils/rdf_.py +34 -5
- cognite/neat/_version.py +1 -1
- cognite/neat/_workflows/steps/lib/current/rules_exporter.py +5 -88
- cognite/neat/_workflows/steps/lib/current/rules_importer.py +3 -14
- cognite/neat/_workflows/steps/lib/current/rules_validator.py +6 -7
- {cognite_neat-0.98.0.dist-info → cognite_neat-0.99.1.dist-info}/METADATA +3 -3
- {cognite_neat-0.98.0.dist-info → cognite_neat-0.99.1.dist-info}/RECORD +87 -92
- cognite/neat/_rules/importers/_rdf/_imf2rules/__init__.py +0 -3
- cognite/neat/_rules/importers/_rdf/_imf2rules/_imf2classes.py +0 -86
- cognite/neat/_rules/importers/_rdf/_imf2rules/_imf2metadata.py +0 -29
- cognite/neat/_rules/importers/_rdf/_imf2rules/_imf2properties.py +0 -130
- cognite/neat/_rules/importers/_rdf/_imf2rules/_imf2rules.py +0 -154
- cognite/neat/_rules/importers/_rdf/_owl2rules/__init__.py +0 -3
- cognite/neat/_rules/importers/_rdf/_owl2rules/_owl2classes.py +0 -58
- cognite/neat/_rules/importers/_rdf/_owl2rules/_owl2metadata.py +0 -65
- cognite/neat/_rules/importers/_rdf/_owl2rules/_owl2properties.py +0 -59
- cognite/neat/_rules/importers/_rdf/_owl2rules/_owl2rules.py +0 -39
- cognite/neat/_rules/models/dms/_schema.py +0 -1101
- cognite/neat/_rules/models/mapping/_base.py +0 -131
- cognite/neat/_utils/cdf/loaders/__init__.py +0 -25
- cognite/neat/_utils/cdf/loaders/_base.py +0 -54
- cognite/neat/_utils/cdf/loaders/_data_modeling.py +0 -339
- cognite/neat/_utils/cdf/loaders/_ingestion.py +0 -167
- /cognite/neat/{_utils/cdf → _client/_api}/__init__.py +0 -0
- {cognite_neat-0.98.0.dist-info → cognite_neat-0.99.1.dist-info}/LICENSE +0 -0
- {cognite_neat-0.98.0.dist-info → cognite_neat-0.99.1.dist-info}/WHEEL +0 -0
- {cognite_neat-0.98.0.dist-info → cognite_neat-0.99.1.dist-info}/entry_points.txt +0 -0
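The most visible structural change in this release is the new cognite/neat/_client/ package: the CDF loader, schema, and data-modeling helpers previously under cognite/neat/_utils/cdf/ now live there (note the rename of _utils/cdf/data_classes.py to _client/data_classes/data_modeling.py and the removal of the _utils/cdf/loaders/ package). These are underscore-prefixed internal modules with no API guarantee, but any code that imported them directly would need updated paths. A minimal sketch of such an update, using only module paths taken from the listing above (the exported symbols are not shown in this diff, so only whole modules are imported):

# Sketch only: adjust internal-module imports across 0.98.0 -> 0.99.1.
# Module paths come from the file listing; exported names are not shown here.
try:
    # 0.99.1 layout
    from cognite.neat._client._api import data_modeling_loaders
except ImportError:
    # 0.98.0 layout (removed in 0.99.1)
    from cognite.neat._utils.cdf import loaders as data_modeling_loaders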
cognite/neat/_rules/importers/_rdf/_imf2rules.py
@@ -0,0 +1,91 @@
+"""This module performs importing of various formats to one of serializations for which
+there are loaders to TransformationRules pydantic class."""
+
+from cognite.neat._rules.importers._rdf._base import BaseRDFImporter
+from cognite.neat._rules.importers._rdf._shared import parse_classes, parse_properties
+
+CLASSES_QUERY = """
+SELECT ?class_ ?name ?description ?implements
+WHERE {{
+    VALUES ?type {{ imf:BlockType imf:TerminalType imf:AttributeType }}
+    ?class_ a ?type .
+
+    OPTIONAL {{?class_ rdfs:subClassOf ?parent }}.
+    OPTIONAL {{?class_ rdfs:label|skos:prefLabel ?name }}.
+    OPTIONAL {{?class_ rdfs:comment|skos:definition ?description}}.
+
+
+    # Add imf:Attribute as parent class when no parent is found
+    BIND(IF(!bound(?parent) && ?type = imf:AttributeType, imf:Attribute, ?parent) AS ?implements)
+
+    # FILTERS
+    FILTER (!isBlank(?class_))
+    FILTER (!bound(?implements) || !isBlank(?implements))
+
+    FILTER (!bound(?name) || LANG(?name) = "" || LANGMATCHES(LANG(?name), "{language}"))
+    FILTER (!bound(?description) || LANG(?description) = "" || LANGMATCHES(LANG(?description), "{language}"))
+}}
+"""
+
+PROPERTIES_QUERY = """
+SELECT ?class_ ?property_ ?name ?description ?value_type ?min_count ?max_count ?default
+WHERE
+{{
+    # CASE 1: Handling Blocks and Terminals
+    {{
+        VALUES ?type {{ imf:BlockType imf:TerminalType }}
+        ?class_ a ?type ;
+            sh:property ?propertyShape .
+        ?propertyShape sh:path ?property_ .
+
+        OPTIONAL {{ ?property_ skos:prefLabel ?name . }}
+        OPTIONAL {{ ?property_ skos:definition ?description . }}
+        OPTIONAL {{ ?property_ rdfs:range ?range . }}
+
+        OPTIONAL {{ ?propertyShape sh:minCount ?min_count . }}
+        OPTIONAL {{ ?propertyShape sh:maxCount ?max_count . }}
+        OPTIONAL {{ ?propertyShape sh:hasValue ?default . }}
+        OPTIONAL {{ ?propertyShape sh:class | sh:qualifiedValueShape/sh:class ?valueShape . }}
+    }}
+
+    UNION
+
+    # CASE 2: Handling Attributes
+    {{
+        ?class_ a imf:AttributeType .
+        BIND(xsd:anyURI AS ?valueShape)
+        BIND(imf:predicate AS ?property_)
+        ?class_ ?property_ ?defaultURI .
+        BIND(STR(?defaultURI) AS ?default)
+
+    }}
+
+    # Set the value type for the property based on sh:class, sh:qualifiedValueType or rdfs:range
+    BIND(IF(BOUND(?valueShape), ?valueShape, IF(BOUND(?range) , ?range , ?valueShape)) AS ?value_type)
+
+    FILTER (!isBlank(?property_))
+    FILTER (!bound(?class_) || !isBlank(?class_))
+    FILTER (!bound(?name) || LANG(?name) = "" || LANGMATCHES(LANG(?name), "{language}"))
+    FILTER (!bound(?description) || LANG(?description) = "" || LANGMATCHES(LANG(?description), "{language}"))
+}}
+"""
+
+
+class IMFImporter(BaseRDFImporter):
+    """Convert IMF Types provided as SHACL shapes to Input Rules."""
+
+    def _to_rules_components(
+        self,
+    ) -> dict:
+        classes, issue_list = parse_classes(self.graph, CLASSES_QUERY, self.language, self.issue_list)
+        self.issue_list = issue_list
+        properties, issue_list = parse_properties(self.graph, PROPERTIES_QUERY, self.language, self.issue_list)
+        self.issue_list = issue_list
+
+        components = {
+            "Metadata": self._metadata,
+            "Classes": list(classes.values()) if classes else [],
+            "Properties": list(properties.values()) if properties else [],
+        }
+
+        return components
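Both query templates above keep doubled braces ({{ ... }}) so that Python's str.format() can inject the language tag without touching the braces SPARQL itself needs. A standalone sketch of how such a template could be executed with rdflib; this is not package code: the Turtle file name is illustrative, the imf namespace IRI is an assumption, and in neat the prefixes are presumably already bound on the graph being queried.

from rdflib import Graph

from cognite.neat._rules.importers._rdf._imf2rules import CLASSES_QUERY

# PREFIX declarations make the sketch self-contained; the imf IRI below is an
# assumption, not taken from this diff.
PREFIXES = """
PREFIX imf: <http://ns.imfid.org/imf#>
PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
PREFIX skos: <http://www.w3.org/2004/02/skos/core#>
"""

graph = Graph()
graph.parse("imf_types.ttl", format="turtle")  # illustrative input file

# str.format() fills only {language}; the doubled {{ }} collapse back to the
# single braces SPARQL expects.
rows = graph.query(PREFIXES + CLASSES_QUERY.format(language="en"))
for class_, name, description, implements in rows:
    print(class_, name, description, implements)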
cognite/neat/_rules/importers/_rdf/_inference2rules.py
@@ -1,6 +1,6 @@
 from collections import Counter, defaultdict
 from collections.abc import Mapping
-from datetime import datetime
+from datetime import datetime, timezone
 from pathlib import Path
 from typing import ClassVar, cast
 
@@ -8,8 +8,7 @@ from cognite.client import data_modeling as dm
 from rdflib import RDF, Namespace, URIRef
 from rdflib import Literal as RdfLiteral
 
-from cognite.neat.
-from cognite.neat._issues.warnings import PropertyValueTypeUndefinedWarning
+from cognite.neat._issues.warnings import PropertySkippedWarning, PropertyValueTypeUndefinedWarning
 from cognite.neat._rules.models import data_types
 from cognite.neat._rules.models.data_types import AnyURI
 from cognite.neat._rules.models.entities._single_value import UnknownEntity
@@ -70,8 +69,15 @@ class InferenceImporter(BaseRDFImporter):
         data_model_id: (dm.DataModelId | tuple[str, str, str]) = DEFAULT_INFERENCE_DATA_MODEL_ID,
         max_number_of_instance: int = -1,
         non_existing_node_type: UnknownEntity | AnyURI = DEFAULT_NON_EXISTING_NODE_TYPE,
+        language: str = "en",
     ) -> "InferenceImporter":
-        return super().from_graph_store(
+        return super().from_graph_store(
+            store,
+            data_model_id,
+            max_number_of_instance,
+            non_existing_node_type,
+            language,
+        )
 
     @classmethod
     def from_file(
@@ -80,8 +86,15 @@
         data_model_id: (dm.DataModelId | tuple[str, str, str]) = DEFAULT_INFERENCE_DATA_MODEL_ID,
         max_number_of_instance: int = -1,
         non_existing_node_type: UnknownEntity | AnyURI = DEFAULT_NON_EXISTING_NODE_TYPE,
+        language: str = "en",
     ) -> "InferenceImporter":
-        return super().from_file(
+        return super().from_file(
+            filepath,
+            data_model_id,
+            max_number_of_instance,
+            non_existing_node_type,
+            language,
+        )
 
     @classmethod
     def from_json_file(
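The new language parameter (default "en") on from_graph_store and from_file is passed straight through to the BaseRDFImporter classmethods; in the RDF importers it feeds the {language} placeholder of the SPARQL templates (see the LANGMATCHES filters above). A minimal usage sketch, with an illustrative file path:

from pathlib import Path

from cognite.neat._rules.importers._rdf._inference2rules import InferenceImporter

# The language argument is forwarded to BaseRDFImporter.from_file (added in this release).
importer = InferenceImporter.from_file(
    Path("knowledge_graph.ttl"),  # illustrative path
    language="nb",
)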
@@ -89,6 +102,7 @@
         filepath: Path,
         data_model_id: (dm.DataModelId | tuple[str, str, str]) = DEFAULT_INFERENCE_DATA_MODEL_ID,
         max_number_of_instance: int = -1,
+        language: str = "en",
     ) -> "InferenceImporter":
         raise NotImplementedError("JSON file format is not supported yet.")
 
@@ -98,6 +112,7 @@
         filepath: Path,
         data_model_id: (dm.DataModelId | tuple[str, str, str]) = DEFAULT_INFERENCE_DATA_MODEL_ID,
         max_number_of_instance: int = -1,
+        language: str = "en",
     ) -> "InferenceImporter":
         raise NotImplementedError("YAML file format is not supported yet.")
 
@@ -156,9 +171,21 @@
             # this is to skip rdf:type property
             if property_uri == RDF.type:
                 continue
+            property_id = remove_namespace_from_uri(property_uri)
+            if property_id in {"external_id", "externalId"}:
+                skip_issue = PropertySkippedWarning(
+                    resource_type="Property",
+                    identifier=f"{class_id}:{property_id}",
+                    property_name=property_id,
+                    reason="External ID is assumed to be the unique identifier of the instance "
+                    "and is not part of the data model schema.",
+                )
+                if skip_issue not in self.issue_list:
+                    self.issue_list.append(skip_issue)
+                continue
 
             self._add_uri_namespace_to_prefixes(cast(URIRef, property_uri), prefixes)
-
+
             if isinstance(data_type_uri, URIRef):
                 data_type_uri = self.overwrite_data_types.get(data_type_uri, data_type_uri)
 
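During inference, triples whose predicate's local name is external_id or externalId are now skipped and reported once via a PropertySkippedWarning, on the assumption that the external ID identifies the instance rather than being a schema property. A small illustration of that local-name check, assuming remove_namespace_from_uri (used above and presumably defined in cognite/neat/_utils/rdf_.py) strips the namespace and returns the local name:

from rdflib import URIRef

from cognite.neat._utils.rdf_ import remove_namespace_from_uri

predicate = URIRef("http://example.org/classic#externalId")  # illustrative IRI
local_name = remove_namespace_from_uri(predicate)

if local_name in {"external_id", "externalId"}:
    # Mirrors the skip above: treated as the instance identifier, not a schema property.
    print(f"skipping <{predicate}> (local name {local_name!r})")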
@@ -198,11 +225,12 @@
 
             # USE CASE 1: If property is not present in properties
             if id_ not in properties:
+                definition["value_type"] = {definition["value_type"]}
                 properties[id_] = definition
 
             # USE CASE 2: first time redefinition, value type change to multi
             elif id_ in properties and definition["value_type"] not in properties[id_]["value_type"]:
-                properties[id_]["value_type"]
+                properties[id_]["value_type"].add(definition["value_type"])
 
             # USE CASE 3: existing but max count is different
             elif (
@@ -212,32 +240,12 @@
             ):
                 properties[id_]["max_count"] = max(properties[id_]["max_count"], definition["max_count"])
 
-        #
-        for
-        if
-
-
-            count_by_value_type = count_by_value_type_by_property[id_]
-            count_list = sorted(count_by_value_type.items(), key=lambda item: item[1], reverse=True)
-            # Make the comment more readable by adapting to the number of value types
-            base_string = "<{value_type}> which occurs <{count}> times"
-            if len(count_list) == 1:
-                type_, count = count_list[0]
-                counts_str = f"with value type {base_string.format(value_type=type_, count=count)} in the graph"
-            elif len(count_list) == 2:
-                first = base_string.format(value_type=count_list[0][0], count=count_list[0][1])
-                second = base_string.format(value_type=count_list[1][0], count=count_list[1][1])
-                counts_str = f"with value types {first} and {second} in the graph"
+        # Create multi-value properties otherwise single value
+        for property_ in properties.values():
+            if len(property_["value_type"]) > 1:
+                property_["value_type"] = " | ".join([str(t) for t in property_["value_type"]])
             else:
-
-                base_string.format(value_type=type_, count=count) for type_, count in count_list[:-1]
-            )
-            last = base_string.format(value_type=count_list[-1][0], count=count_list[-1][1])
-            counts_str = f"with value types {first_part} and {last} in the graph"
-
-            class_id = property_["class_"]
-            property_id = property_["property_"]
-            property_["comment"] = f"Class <{class_id}> has property <{property_id}> {counts_str}"
+                property_["value_type"] = next(iter(property_["value_type"]))
 
         return {
             "metadata": self._default_metadata().model_dump(),
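With this change the importer first accumulates every observed value type for a property into a set (seeded in USE CASE 1, extended in USE CASE 2) and only collapses the set afterwards: several members become a " | "-joined multi-value type, a single member stays a plain value type. A toy, standalone reproduction of that collapse step (not package code; the property names and types are made up):

# Toy reproduction of the collapse step above.
properties = {
    "Pump:ratedPower": {"value_type": {"float", "string"}},  # observed with two types
    "Pump:manufacturer": {"value_type": {"string"}},         # observed with one type
}

for property_ in properties.values():
    if len(property_["value_type"]) > 1:
        # sorted() is used here only to make the printed output deterministic;
        # the importer itself joins the set in iteration order.
        property_["value_type"] = " | ".join(sorted(str(t) for t in property_["value_type"]))
    else:
        property_["value_type"] = next(iter(property_["value_type"]))

print(properties["Pump:ratedPower"]["value_type"])    # float | string
print(properties["Pump:manufacturer"]["value_type"])  # string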
@@ -247,14 +255,14 @@
         }
 
     def _default_metadata(self):
+        now = datetime.now(timezone.utc)
         return InformationMetadata(
            space=self.data_model_id.space,
            external_id=self.data_model_id.external_id,
            version=self.data_model_id.version,
            name="Inferred Model",
            creator="NEAT",
-            created=
-            updated=
+            created=now,
+            updated=now,
            description="Inferred model from knowledge graph",
-            namespace=DEFAULT_NAMESPACE,
        )
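The default metadata now stamps created and updated from a single timezone-aware UTC datetime. For reference, the pattern in isolation:

from datetime import datetime, timezone

now = datetime.now(timezone.utc)  # timezone-aware, unlike the naive datetime.utcnow()
print(now.isoformat())  # e.g. 2025-01-01T12:00:00+00:00
print(now.tzinfo)       # timezone.utc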
cognite/neat/_rules/importers/_rdf/_owl2rules.py
@@ -0,0 +1,80 @@
+"""This module performs importing of various formats to one of serializations for which
+there are loaders to TransformationRules pydantic class."""
+
+from cognite.neat._rules.importers._rdf._base import BaseRDFImporter
+from cognite.neat._rules.importers._rdf._shared import parse_classes, parse_properties
+
+CLASSES_QUERY = """SELECT ?class_ ?name ?description ?implements
+WHERE {{
+
+    ?class_ a owl:Class .
+    OPTIONAL {{?class_ rdfs:subClassOf ?implements }}.
+    OPTIONAL {{?class_ rdfs:label|skos:prefLabel ?name }}.
+    OPTIONAL {{?class_ rdfs:comment|skos:definition ?description}} .
+
+
+    FILTER (!isBlank(?class_))
+    FILTER (!bound(?implements) || !isBlank(?implements))
+
+    FILTER (!bound(?name) || LANG(?name) = "" || LANGMATCHES(LANG(?name), "{language}"))
+    FILTER (!bound(?description) || LANG(?description) = "" || LANGMATCHES(LANG(?description), "{language}"))
+
+}}
+"""
+
+PROPERTIES_QUERY = """
+
+SELECT ?class_ ?property_ ?name ?description ?value_type ?minCount ?maxCount ?default
+WHERE {{
+    ?property_ a ?property_Type.
+    FILTER (?property_Type IN (owl:ObjectProperty, owl:DatatypeProperty ) )
+    OPTIONAL {{?property_ rdfs:domain ?class_ }}.
+    OPTIONAL {{?property_ rdfs:range ?value_type }}.
+    OPTIONAL {{?property_ rdfs:label|skos:prefLabel ?name }}.
+    OPTIONAL {{?property_ rdfs:comment|skos:definition ?description}}.
+    OPTIONAL {{?property_ owl:maxCardinality ?maxCount}}.
+    OPTIONAL {{?property_ owl:minCardinality ?minCount}}.
+
+    # FILTERS
+    FILTER (!isBlank(?property_))
+    FILTER (!bound(?name) || LANG(?name) = "" || LANGMATCHES(LANG(?name), "{language}"))
+    FILTER (!bound(?description) || LANG(?description) = "" || LANGMATCHES(LANG(?description), "{language}"))
+}}
+"""
+
+
+class OWLImporter(BaseRDFImporter):
+    """Convert OWL ontology to tables/ transformation rules / Excel file.
+
+    Args:
+        filepath: Path to OWL ontology
+
+    !!! Note
+        OWL Ontologies are information models which completeness varies. As such, constructing functional
+        data model directly will often be impossible, therefore the produced Rules object will be ill formed.
+        To avoid this, neat will automatically attempt to make the imported rules compliant by adding default
+        values for missing information, attaching dangling properties to default containers based on the
+        property type, etc.
+
+        One has to be aware that NEAT will be opinionated about how to make the ontology
+        compliant, and that the resulting rules may not be what you expect.
+
+    """
+
+    def _to_rules_components(
+        self,
+    ) -> dict:
+        classes, issue_list = parse_classes(self.graph, CLASSES_QUERY, self.language, self.issue_list)
+        self.issue_list = issue_list
+
+        # NeatError
+        properties, issue_list = parse_properties(self.graph, PROPERTIES_QUERY, self.language, self.issue_list)
+        self.issue_list = issue_list
+
+        components = {
+            "Metadata": self._metadata,
+            "Classes": list(classes.values()) if classes else [],
+            "Properties": list(properties.values()) if properties else [],
+        }
+
+        return components
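Both new importers follow the same pattern: module-level SPARQL templates plus a thin _to_rules_components() that delegates to the shared parse_classes/parse_properties helpers and returns the Metadata, Classes, and Properties sections. A hedged usage sketch; from_file is inherited from BaseRDFImporter (as the InferenceImporter overrides above suggest), but its full signature and the public entry point for turning the importer into verified rules are not shown in this diff, so the call below is an assumption and the ontology path is illustrative.

from pathlib import Path

from cognite.neat._rules.importers._rdf._owl2rules import OWLImporter

# Assumes BaseRDFImporter.from_file accepts just a path, with defaults for the
# remaining arguments (not shown in this diff).
importer = OWLImporter.from_file(Path("ontology.ttl"))

# _to_rules_components() is the method defined above; it returns the raw
# sections that the base importer assembles into input rules.
components = importer._to_rules_components()
print(len(components["Classes"]), "classes,", len(components["Properties"]), "properties")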