powerbi_ontology_extractor-0.1.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- cli/__init__.py +1 -0
- cli/pbi_ontology_cli.py +286 -0
- powerbi_ontology/__init__.py +38 -0
- powerbi_ontology/analyzer.py +420 -0
- powerbi_ontology/chat.py +303 -0
- powerbi_ontology/cli.py +530 -0
- powerbi_ontology/contract_builder.py +269 -0
- powerbi_ontology/dax_parser.py +305 -0
- powerbi_ontology/export/__init__.py +17 -0
- powerbi_ontology/export/contract_to_owl.py +408 -0
- powerbi_ontology/export/fabric_iq.py +243 -0
- powerbi_ontology/export/fabric_iq_to_owl.py +463 -0
- powerbi_ontology/export/json_schema.py +110 -0
- powerbi_ontology/export/ontoguard.py +177 -0
- powerbi_ontology/export/owl.py +522 -0
- powerbi_ontology/extractor.py +368 -0
- powerbi_ontology/mcp_config.py +237 -0
- powerbi_ontology/mcp_models.py +166 -0
- powerbi_ontology/mcp_server.py +1106 -0
- powerbi_ontology/ontology_diff.py +776 -0
- powerbi_ontology/ontology_generator.py +406 -0
- powerbi_ontology/review.py +556 -0
- powerbi_ontology/schema_mapper.py +369 -0
- powerbi_ontology/semantic_debt.py +584 -0
- powerbi_ontology/utils/__init__.py +13 -0
- powerbi_ontology/utils/pbix_reader.py +558 -0
- powerbi_ontology/utils/visualizer.py +332 -0
- powerbi_ontology_extractor-0.1.0.dist-info/METADATA +507 -0
- powerbi_ontology_extractor-0.1.0.dist-info/RECORD +33 -0
- powerbi_ontology_extractor-0.1.0.dist-info/WHEEL +5 -0
- powerbi_ontology_extractor-0.1.0.dist-info/entry_points.txt +4 -0
- powerbi_ontology_extractor-0.1.0.dist-info/licenses/LICENSE +21 -0
- powerbi_ontology_extractor-0.1.0.dist-info/top_level.txt +2 -0
powerbi_ontology/export/fabric_iq_to_owl.py

@@ -0,0 +1,463 @@
"""
Fabric IQ JSON → OWL Converter

Converts Fabric IQ JSON format to OntoGuard-compatible OWL format.
This enables Power BI ontologies to be validated by OntoGuard semantic firewall.

Key mappings:
- entities → owl:Class
- properties → owl:DatatypeProperty
- relationships → owl:ObjectProperty
- businessRules → Action classes with requiresRole/appliesTo
- permissions → Action individuals with role constraints
"""

import logging
from typing import Dict, Optional, Any
from datetime import datetime

from rdflib import Graph, Namespace, Literal, URIRef
from rdflib.namespace import RDF, RDFS, OWL, XSD

logger = logging.getLogger(__name__)


class FabricIQToOWLConverter:
    """
    Converts Fabric IQ JSON to OntoGuard-compatible OWL format.

    This is the bridge between Power BI semantic models and OntoGuard
    semantic firewall for AI agents.

    Example:
        converter = FabricIQToOWLConverter(fabric_iq_json)
        owl_content = converter.convert(format="xml")
        converter.save("output.owl")
    """

    # OntoGuard namespace for action rules
    ONTOGUARD_NS = "http://example.org/ontoguard#"

    def __init__(self, fabric_iq_json: Dict[str, Any], base_uri: Optional[str] = None):
        """
        Initialize converter.

        Args:
            fabric_iq_json: Fabric IQ JSON from FabricIQExporter.export()
            base_uri: Optional base URI for the ontology (defaults to ontology name)
        """
        self.fabric_iq = fabric_iq_json
        self.graph = Graph()

        # Create namespace
        ontology_name = fabric_iq_json.get("ontologyItem", "powerbi_ontology")
        safe_name = ontology_name.replace(" ", "_").replace("-", "_")
        self.base_uri = base_uri or f"http://example.org/powerbi/{safe_name}#"

        self.ont = Namespace(self.base_uri)
        self.ontoguard = Namespace(self.ONTOGUARD_NS)

        # Bind namespaces for cleaner output
        self.graph.bind("ont", self.ont)
        self.graph.bind("ontoguard", self.ontoguard)
        self.graph.bind("owl", OWL)
        self.graph.bind("rdfs", RDFS)
        self.graph.bind("xsd", XSD)

    def convert(self, format: str = "xml") -> str:
        """
        Convert Fabric IQ JSON to OWL format.

        Args:
            format: Output format ("xml", "turtle", "json-ld", "n3")

        Returns:
            OWL content as string
        """
        logger.info(f"Converting Fabric IQ to OWL format ({format})")

        # Add ontology metadata
        self._add_ontology_metadata()

        # Add base classes (User roles, Action)
        self._add_base_classes()

        # Add OntoGuard properties (requiresRole, appliesTo)
        self._add_ontoguard_properties()

        # Convert entities to OWL classes
        for entity in self.fabric_iq.get("entities", []):
            self._add_entity_class(entity)

        # Convert relationships to OWL object properties
        for rel in self.fabric_iq.get("relationships", []):
            self._add_relationship(rel)

        # Convert business rules to action rules
        for rule in self.fabric_iq.get("businessRules", []):
            self._add_action_rule(rule)

        # Add schema bindings as annotations (for drift detection)
        self._add_schema_bindings()

        return self.graph.serialize(format=format)

    def _add_ontology_metadata(self):
        """Add OWL ontology metadata."""
        ontology_uri = URIRef(self.base_uri.rstrip("#"))

        self.graph.add((ontology_uri, RDF.type, OWL.Ontology))
        self.graph.add((ontology_uri, RDFS.label, Literal(
            self.fabric_iq.get("ontologyItem", "Power BI Ontology")
        )))
        self.graph.add((ontology_uri, RDFS.comment, Literal(
            f"Ontology extracted from {self.fabric_iq.get('source', 'Power BI')}"
        )))
        self.graph.add((ontology_uri, OWL.versionInfo, Literal(
            self.fabric_iq.get("version", "1.0")
        )))

        # Add extraction timestamp
        extracted_date = self.fabric_iq.get("extractedDate", datetime.now().isoformat())
        self.graph.add((ontology_uri, self.ont.extractedDate, Literal(
            extracted_date, datatype=XSD.dateTime
        )))

    def _add_base_classes(self):
        """Add base classes for OntoGuard compatibility."""
        # User class (base for roles)
        user_uri = self.ont.User
        self.graph.add((user_uri, RDF.type, OWL.Class))
        self.graph.add((user_uri, RDFS.label, Literal("User")))
        self.graph.add((user_uri, RDFS.comment, Literal(
            "Base class for all user roles"
        )))

        # Standard roles (subclasses of User)
        roles = ["Admin", "Analyst", "Viewer", "Editor", "Owner"]
        for role in roles:
            role_uri = self.ont[role]
            self.graph.add((role_uri, RDF.type, OWL.Class))
            self.graph.add((role_uri, RDFS.subClassOf, user_uri))
            self.graph.add((role_uri, RDFS.label, Literal(role)))

        # Action class (base for all actions)
        action_uri = self.ont.Action
        self.graph.add((action_uri, RDF.type, OWL.Class))
        self.graph.add((action_uri, RDFS.label, Literal("Action")))
        self.graph.add((action_uri, RDFS.comment, Literal(
            "Base class for all actions that can be performed"
        )))

        # Standard action subclasses
        actions = ["ReadAction", "WriteAction", "DeleteAction", "ExecuteAction"]
        for action in actions:
            action_class_uri = self.ont[action]
            self.graph.add((action_class_uri, RDF.type, OWL.Class))
            self.graph.add((action_class_uri, RDFS.subClassOf, action_uri))
            self.graph.add((action_class_uri, RDFS.label, Literal(action)))

    def _add_ontoguard_properties(self):
        """Add OntoGuard action permission properties."""
        # requiresRole property
        requires_role = self.ont.requiresRole
        self.graph.add((requires_role, RDF.type, OWL.ObjectProperty))
        self.graph.add((requires_role, RDFS.label, Literal("requires role")))
        self.graph.add((requires_role, RDFS.comment, Literal(
            "Specifies which user role is required to perform an action"
        )))
        self.graph.add((requires_role, RDFS.domain, self.ont.Action))
        self.graph.add((requires_role, RDFS.range, self.ont.User))

        # appliesTo property
        applies_to = self.ont.appliesTo
        self.graph.add((applies_to, RDF.type, OWL.ObjectProperty))
        self.graph.add((applies_to, RDFS.label, Literal("applies to")))
        self.graph.add((applies_to, RDFS.comment, Literal(
            "Specifies which entity type an action can be applied to"
        )))
        self.graph.add((applies_to, RDFS.domain, self.ont.Action))
        self.graph.add((applies_to, RDFS.range, OWL.Thing))

        # requiresApproval property (for business rules)
        requires_approval = self.ont.requiresApproval
        self.graph.add((requires_approval, RDF.type, OWL.ObjectProperty))
        self.graph.add((requires_approval, RDFS.label, Literal("requires approval")))
        self.graph.add((requires_approval, RDFS.comment, Literal(
            "Indicates that an action requires approval from a specific role"
        )))
        self.graph.add((requires_approval, RDFS.domain, self.ont.Action))
        self.graph.add((requires_approval, RDFS.range, self.ont.User))

        # allowsAction property (for specifying action type)
        allows_action = self.ont.allowsAction
        self.graph.add((allows_action, RDF.type, OWL.DatatypeProperty))
        self.graph.add((allows_action, RDFS.label, Literal("allows action")))
        self.graph.add((allows_action, RDFS.comment, Literal(
            "Specifies the action type: read, create, update, delete"
        )))
        self.graph.add((allows_action, RDFS.domain, self.ont.Action))
        self.graph.add((allows_action, RDFS.range, XSD.string))

    def _add_entity_class(self, entity: Dict[str, Any]):
        """Convert Fabric IQ entity to OWL class with properties."""
        entity_name = entity.get("name", "")
        if not entity_name:
            return

        # Make valid URI
        safe_name = self._safe_uri_name(entity_name)
        entity_uri = self.ont[safe_name]

        # Entity is a class
        self.graph.add((entity_uri, RDF.type, OWL.Class))
        self.graph.add((entity_uri, RDFS.label, Literal(entity_name)))

        if entity.get("description"):
            self.graph.add((entity_uri, RDFS.comment, Literal(entity["description"])))

        # Add entity type annotation
        if entity.get("entityType"):
            self.graph.add((entity_uri, self.ont.entityType, Literal(entity["entityType"])))

        # Add source table annotation (for schema binding)
        if entity.get("source"):
            self.graph.add((entity_uri, self.ont.sourceTable, Literal(entity["source"])))

        # Add properties as datatype properties
        for prop in entity.get("properties", []):
            self._add_property(entity_uri, safe_name, prop)

        # Generate default action rules for this entity
        self._generate_entity_action_rules(safe_name, entity)

    def _add_property(self, entity_uri: URIRef, entity_name: str, prop: Dict[str, Any]):
        """Add property as OWL datatype property."""
        prop_name = prop.get("name", "")
        if not prop_name:
            return

        safe_prop_name = self._safe_uri_name(f"{entity_name}_{prop_name}")
        prop_uri = self.ont[safe_prop_name]

        self.graph.add((prop_uri, RDF.type, OWL.DatatypeProperty))
        self.graph.add((prop_uri, RDFS.label, Literal(prop_name)))
        self.graph.add((prop_uri, RDFS.domain, entity_uri))

        # Map data type to XSD
        xsd_type = self._map_to_xsd(prop.get("type", "String"))
        self.graph.add((prop_uri, RDFS.range, xsd_type))

        if prop.get("description"):
            self.graph.add((prop_uri, RDFS.comment, Literal(prop["description"])))

        # Add constraints as annotations
        for constraint in prop.get("constraints", []):
            self._add_constraint(prop_uri, constraint)

    def _add_constraint(self, prop_uri: URIRef, constraint: Dict[str, Any]):
        """Add property constraint as OWL annotation."""
        constraint_type = constraint.get("type", "")
        constraint_value = constraint.get("value")

        if constraint_type == "range" and isinstance(constraint_value, dict):
            if "min" in constraint_value:
                self.graph.add((prop_uri, self.ont.minValue, Literal(
                    constraint_value["min"], datatype=XSD.decimal
                )))
            if "max" in constraint_value:
                self.graph.add((prop_uri, self.ont.maxValue, Literal(
                    constraint_value["max"], datatype=XSD.decimal
                )))
        elif constraint_type == "required":
            self.graph.add((prop_uri, self.ont.isRequired, Literal(True, datatype=XSD.boolean)))
        elif constraint_type == "unique":
            self.graph.add((prop_uri, self.ont.isUnique, Literal(True, datatype=XSD.boolean)))

    def _add_relationship(self, rel: Dict[str, Any]):
        """Add relationship as OWL object property."""
        rel_type = rel.get("type", "relatedTo")
        from_entity = rel.get("from", "")
        to_entity = rel.get("to", "")

        if not from_entity or not to_entity:
            return

        safe_rel_name = self._safe_uri_name(f"{from_entity}_{rel_type}_{to_entity}")
        rel_uri = self.ont[safe_rel_name]
        from_uri = self.ont[self._safe_uri_name(from_entity)]
        to_uri = self.ont[self._safe_uri_name(to_entity)]

        self.graph.add((rel_uri, RDF.type, OWL.ObjectProperty))
        self.graph.add((rel_uri, RDFS.label, Literal(rel_type)))
        self.graph.add((rel_uri, RDFS.domain, from_uri))
        self.graph.add((rel_uri, RDFS.range, to_uri))

        if rel.get("description"):
            self.graph.add((rel_uri, RDFS.comment, Literal(rel["description"])))

        # Add cardinality annotation
        if rel.get("cardinality"):
            self.graph.add((rel_uri, self.ont.cardinality, Literal(rel["cardinality"])))

    def _add_action_rule(self, rule: Dict[str, Any]):
        """Convert business rule to OntoGuard action rule."""
        rule_name = rule.get("name", "")
        if not rule_name:
            return

        safe_name = self._safe_uri_name(rule_name)

        # Create action class
        action_class_uri = self.ont[f"{safe_name}Action"]
        self.graph.add((action_class_uri, RDF.type, OWL.Class))
        self.graph.add((action_class_uri, RDFS.subClassOf, self.ont.Action))
        self.graph.add((action_class_uri, RDFS.label, Literal(rule_name)))

        if rule.get("description"):
            self.graph.add((action_class_uri, RDFS.comment, Literal(rule["description"])))

        # Create action individual with requiresRole and appliesTo
        action_uri = self.ont[f"{safe_name}ActionInstance"]
        self.graph.add((action_uri, RDF.type, action_class_uri))
        self.graph.add((action_uri, RDFS.label, Literal(f"{rule_name} action")))

        # Map action type
        action_type = rule.get("action", "").lower()
        if action_type:
            self.graph.add((action_uri, self.ont.allowsAction, Literal(action_type)))

        # Add entity (appliesTo)
        entity = rule.get("entity", "")
        if entity:
            entity_uri = self.ont[self._safe_uri_name(entity)]
            self.graph.add((action_uri, self.ont.appliesTo, entity_uri))

        # Determine required role from classification or triggers
        classification = rule.get("classification", "").lower()
        triggers = rule.get("triggers", [])

        # Map classification to required role
        role_mapping = {
            "critical": "Admin",
            "high": "Admin",
            "medium": "Editor",
            "low": "Viewer",
            "notify": "Analyst",
        }

        required_role = role_mapping.get(classification, "Viewer")
        if "NotifyOperations" in triggers:
            required_role = "Admin"

        role_uri = self.ont[required_role]
        self.graph.add((action_uri, self.ont.requiresRole, role_uri))

        # Add condition as annotation
        if rule.get("condition"):
            self.graph.add((action_uri, self.ont.ruleCondition, Literal(rule["condition"])))

    def _generate_entity_action_rules(self, entity_name: str, entity: Dict[str, Any]):  # noqa: ARG002
        """Generate default CRUD action rules for an entity."""
        # Standard actions: read, create, update, delete
        actions = [
            ("read", "Viewer"),
            ("create", "Editor"),
            ("update", "Editor"),
            ("delete", "Admin"),
        ]

        entity_uri = self.ont[entity_name]

        for action, default_role in actions:
            # Create action individual
            action_name = f"{action}_{entity_name}"
            action_uri = self.ont[self._safe_uri_name(action_name)]

            # Determine action class
            action_class = {
                "read": self.ont.ReadAction,
                "create": self.ont.WriteAction,
                "update": self.ont.WriteAction,
                "delete": self.ont.DeleteAction,
            }.get(action, self.ont.Action)

            self.graph.add((action_uri, RDF.type, action_class))
            self.graph.add((action_uri, RDFS.label, Literal(f"{action} {entity_name}")))
            self.graph.add((action_uri, self.ont.allowsAction, Literal(action)))
            self.graph.add((action_uri, self.ont.appliesTo, entity_uri))
            self.graph.add((action_uri, self.ont.requiresRole, self.ont[default_role]))

    def _add_schema_bindings(self):
        """Add schema bindings as annotations for drift detection."""
        data_bindings = self.fabric_iq.get("dataBindings", {})

        for entity_name, binding in data_bindings.items():
            entity_uri = self.ont[self._safe_uri_name(entity_name)]

            if binding.get("source"):
                self.graph.add((entity_uri, self.ont.schemaSource, Literal(binding["source"])))

            # Add column mappings
            for prop_name, column_name in binding.get("mapping", {}).items():
                prop_uri = self.ont[self._safe_uri_name(f"{entity_name}_{prop_name}")]
                self.graph.add((prop_uri, self.ont.sourceColumn, Literal(column_name)))

    def _safe_uri_name(self, name: str) -> str:
        """Convert name to valid URI component."""
        # Replace spaces and special characters
        safe = name.replace(" ", "_").replace("-", "_").replace(".", "_")
        # Remove any remaining invalid characters
        safe = "".join(c for c in safe if c.isalnum() or c == "_")
        return safe

    def _map_to_xsd(self, data_type: str) -> URIRef:
        """Map Fabric IQ data type to XSD type."""
        type_mapping = {
            "String": XSD.string,
            "string": XSD.string,
            "Integer": XSD.integer,
            "integer": XSD.integer,
            "int": XSD.integer,
            "Decimal": XSD.decimal,
            "decimal": XSD.decimal,
            "float": XSD.decimal,
            "Double": XSD.double,
            "double": XSD.double,
            "Date": XSD.date,
            "date": XSD.date,
            "DateTime": XSD.dateTime,
            "dateTime": XSD.dateTime,
            "Boolean": XSD.boolean,
            "boolean": XSD.boolean,
            "bool": XSD.boolean,
        }
        return type_mapping.get(data_type, XSD.string)

    def save(self, filepath: str, format: str = "xml"):
        """
        Save OWL export to file.

        Args:
            filepath: Path to save file
            format: Output format ("xml", "turtle", "json-ld", "n3")
        """
        output = self.convert(format=format)
        with open(filepath, "w", encoding="utf-8") as f:
            f.write(output)
        logger.info(f"Saved OWL export to {filepath}")

    @classmethod
    def from_fabric_iq_exporter(cls, exporter, base_uri: Optional[str] = None):
        """
        Create converter from FabricIQExporter instance.

        Args:
            exporter: FabricIQExporter instance
            base_uri: Optional base URI

        Returns:
            FabricIQToOWLConverter instance
        """
        fabric_iq_json = exporter.export()
        return cls(fabric_iq_json, base_uri)
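For reference, a minimal usage sketch of the converter above. The input dict and file names are illustrative only (a real payload would come from FabricIQExporter.export()); the import path follows the file layout listed at the top of this diff, and rdflib is assumed to be installed.

from powerbi_ontology.export.fabric_iq_to_owl import FabricIQToOWLConverter

# Hypothetical Fabric IQ payload, shaped like the keys the converter reads.
fabric_iq = {
    "ontologyItem": "Sales Model",
    "source": "sales.pbix",
    "version": "1.0",
    "entities": [
        {
            "name": "Customer",
            "description": "Customer dimension",
            "properties": [{"name": "CustomerID", "type": "Integer"}],
        },
        {"name": "Order", "properties": [{"name": "Amount", "type": "Decimal"}]},
    ],
    "relationships": [
        {"from": "Customer", "to": "Order", "type": "places", "cardinality": "1:*"}
    ],
    "businessRules": [],
}

converter = FabricIQToOWLConverter(fabric_iq)
turtle = converter.convert(format="turtle")  # classes, datatype properties, default CRUD action individuals
converter.save("sales_model.owl")            # save() serializes as RDF/XML by default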
powerbi_ontology/export/json_schema.py

@@ -0,0 +1,110 @@
"""
JSON Schema Exporter

Exports ontologies to JSON Schema format (draft-07).
"""

import logging
from typing import Dict

from powerbi_ontology.ontology_generator import Ontology, OntologyEntity

logger = logging.getLogger(__name__)


class JSONSchemaExporter:
    """
    Exports ontologies to JSON Schema format.

    Uses JSON Schema draft-07 standard for maximum compatibility.
    """

    def __init__(self, ontology: Ontology):
        """
        Initialize JSON Schema exporter.

        Args:
            ontology: Ontology to export
        """
        self.ontology = ontology

    def export(self) -> Dict:
        """
        Export ontology to JSON Schema format.

        Returns:
            Dictionary in JSON Schema format
        """
        logger.info(f"Exporting ontology '{self.ontology.name}' to JSON Schema format")

        schema = {
            "$schema": "http://json-schema.org/draft-07/schema#",
            "$id": f"https://example.com/ontologies/{self.ontology.name}.schema.json",
            "title": self.ontology.name,
            "description": f"Ontology extracted from {self.ontology.source}",
            "version": self.ontology.version,
            "definitions": {}
        }

        # Add entity definitions
        for entity in self.ontology.entities:
            schema["definitions"][entity.name] = self._entity_to_json_schema(entity)

        # Add root schema with all entities
        schema["properties"] = {
            entity.name: {"$ref": f"#/definitions/{entity.name}"}
            for entity in self.ontology.entities
        }

        return schema

    def _entity_to_json_schema(self, entity: OntologyEntity) -> Dict:
        """Convert entity to JSON Schema."""
        properties = {}
        required = []

        for prop in entity.properties:
            prop_schema = {
                "type": self._map_type_to_json_schema(prop.data_type),
                "description": prop.description
            }

            # Add constraints
            for constraint in prop.constraints:
                if constraint.type == "range":
                    if isinstance(constraint.value, dict):
                        if "min" in constraint.value:
                            prop_schema["minimum"] = constraint.value["min"]
                        if "max" in constraint.value:
                            prop_schema["maximum"] = constraint.value["max"]
                elif constraint.type == "enum":
                    prop_schema["enum"] = constraint.value
                elif constraint.type == "regex":
                    prop_schema["pattern"] = constraint.value

            properties[prop.name] = prop_schema

            if prop.required:
                required.append(prop.name)

        schema = {
            "type": "object",
            "description": entity.description,
            "properties": properties
        }

        if required:
            schema["required"] = required

        return schema

    def _map_type_to_json_schema(self, data_type: str) -> str:
        """Map ontology data type to JSON Schema type."""
        type_mapping = {
            "String": "string",
            "Integer": "integer",
            "Decimal": "number",
            "Date": "string",  # JSON Schema uses string for dates
            "Boolean": "boolean"
        }
        return type_mapping.get(data_type, "string")
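A hedged sketch of how the exporter above might be driven. It assumes an Ontology instance already built elsewhere in the package (its construction is not shown in this diff); the helper function name and output path are hypothetical.

import json

from powerbi_ontology.export.json_schema import JSONSchemaExporter
from powerbi_ontology.ontology_generator import Ontology


def ontology_to_json_schema(ontology: Ontology, path: str) -> None:
    """Write the draft-07 schema for an already-built Ontology to `path` (illustrative helper)."""
    schema = JSONSchemaExporter(ontology).export()
    with open(path, "w", encoding="utf-8") as f:
        json.dump(schema, f, indent=2)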