naas-abi-core 1.4.1 (py3-none-any.whl)
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- assets/favicon.ico +0 -0
- assets/logo.png +0 -0
- naas_abi_core/__init__.py +1 -0
- naas_abi_core/apps/api/api.py +245 -0
- naas_abi_core/apps/api/api_test.py +281 -0
- naas_abi_core/apps/api/openapi_doc.py +144 -0
- naas_abi_core/apps/mcp/Dockerfile.mcp +35 -0
- naas_abi_core/apps/mcp/mcp_server.py +243 -0
- naas_abi_core/apps/mcp/mcp_server_test.py +163 -0
- naas_abi_core/apps/terminal_agent/main.py +555 -0
- naas_abi_core/apps/terminal_agent/terminal_style.py +175 -0
- naas_abi_core/engine/Engine.py +87 -0
- naas_abi_core/engine/EngineProxy.py +109 -0
- naas_abi_core/engine/Engine_test.py +6 -0
- naas_abi_core/engine/IEngine.py +91 -0
- naas_abi_core/engine/conftest.py +45 -0
- naas_abi_core/engine/engine_configuration/EngineConfiguration.py +216 -0
- naas_abi_core/engine/engine_configuration/EngineConfiguration_Deploy.py +7 -0
- naas_abi_core/engine/engine_configuration/EngineConfiguration_GenericLoader.py +49 -0
- naas_abi_core/engine/engine_configuration/EngineConfiguration_ObjectStorageService.py +159 -0
- naas_abi_core/engine/engine_configuration/EngineConfiguration_ObjectStorageService_test.py +26 -0
- naas_abi_core/engine/engine_configuration/EngineConfiguration_SecretService.py +138 -0
- naas_abi_core/engine/engine_configuration/EngineConfiguration_SecretService_test.py +74 -0
- naas_abi_core/engine/engine_configuration/EngineConfiguration_TripleStoreService.py +224 -0
- naas_abi_core/engine/engine_configuration/EngineConfiguration_TripleStoreService_test.py +109 -0
- naas_abi_core/engine/engine_configuration/EngineConfiguration_VectorStoreService.py +76 -0
- naas_abi_core/engine/engine_configuration/EngineConfiguration_VectorStoreService_test.py +33 -0
- naas_abi_core/engine/engine_configuration/EngineConfiguration_test.py +9 -0
- naas_abi_core/engine/engine_configuration/utils/PydanticModelValidator.py +15 -0
- naas_abi_core/engine/engine_loaders/EngineModuleLoader.py +302 -0
- naas_abi_core/engine/engine_loaders/EngineOntologyLoader.py +16 -0
- naas_abi_core/engine/engine_loaders/EngineServiceLoader.py +47 -0
- naas_abi_core/integration/__init__.py +7 -0
- naas_abi_core/integration/integration.py +28 -0
- naas_abi_core/models/Model.py +198 -0
- naas_abi_core/models/OpenRouter.py +18 -0
- naas_abi_core/models/OpenRouter_test.py +36 -0
- naas_abi_core/module/Module.py +252 -0
- naas_abi_core/module/ModuleAgentLoader.py +50 -0
- naas_abi_core/module/ModuleUtils.py +20 -0
- naas_abi_core/modules/templatablesparqlquery/README.md +196 -0
- naas_abi_core/modules/templatablesparqlquery/__init__.py +39 -0
- naas_abi_core/modules/templatablesparqlquery/ontologies/TemplatableSparqlQueryOntology.ttl +116 -0
- naas_abi_core/modules/templatablesparqlquery/workflows/GenericWorkflow.py +48 -0
- naas_abi_core/modules/templatablesparqlquery/workflows/TemplatableSparqlQueryLoader.py +192 -0
- naas_abi_core/pipeline/__init__.py +6 -0
- naas_abi_core/pipeline/pipeline.py +70 -0
- naas_abi_core/services/__init__.py +0 -0
- naas_abi_core/services/agent/Agent.py +1619 -0
- naas_abi_core/services/agent/AgentMemory_test.py +28 -0
- naas_abi_core/services/agent/Agent_test.py +214 -0
- naas_abi_core/services/agent/IntentAgent.py +1179 -0
- naas_abi_core/services/agent/IntentAgent_test.py +139 -0
- naas_abi_core/services/agent/beta/Embeddings.py +181 -0
- naas_abi_core/services/agent/beta/IntentMapper.py +120 -0
- naas_abi_core/services/agent/beta/LocalModel.py +88 -0
- naas_abi_core/services/agent/beta/VectorStore.py +89 -0
- naas_abi_core/services/agent/test_agent_memory.py +278 -0
- naas_abi_core/services/agent/test_postgres_integration.py +145 -0
- naas_abi_core/services/cache/CacheFactory.py +31 -0
- naas_abi_core/services/cache/CachePort.py +63 -0
- naas_abi_core/services/cache/CacheService.py +246 -0
- naas_abi_core/services/cache/CacheService_test.py +85 -0
- naas_abi_core/services/cache/adapters/secondary/CacheFSAdapter.py +39 -0
- naas_abi_core/services/object_storage/ObjectStorageFactory.py +57 -0
- naas_abi_core/services/object_storage/ObjectStoragePort.py +47 -0
- naas_abi_core/services/object_storage/ObjectStorageService.py +41 -0
- naas_abi_core/services/object_storage/adapters/secondary/ObjectStorageSecondaryAdapterFS.py +52 -0
- naas_abi_core/services/object_storage/adapters/secondary/ObjectStorageSecondaryAdapterNaas.py +131 -0
- naas_abi_core/services/object_storage/adapters/secondary/ObjectStorageSecondaryAdapterS3.py +171 -0
- naas_abi_core/services/ontology/OntologyPorts.py +36 -0
- naas_abi_core/services/ontology/OntologyService.py +17 -0
- naas_abi_core/services/ontology/adaptors/secondary/OntologyService_SecondaryAdaptor_NERPort.py +37 -0
- naas_abi_core/services/secret/Secret.py +138 -0
- naas_abi_core/services/secret/SecretPorts.py +45 -0
- naas_abi_core/services/secret/Secret_test.py +65 -0
- naas_abi_core/services/secret/adaptors/secondary/Base64Secret.py +57 -0
- naas_abi_core/services/secret/adaptors/secondary/Base64Secret_test.py +39 -0
- naas_abi_core/services/secret/adaptors/secondary/NaasSecret.py +88 -0
- naas_abi_core/services/secret/adaptors/secondary/NaasSecret_test.py +25 -0
- naas_abi_core/services/secret/adaptors/secondary/dotenv_secret_secondaryadaptor.py +29 -0
- naas_abi_core/services/triple_store/TripleStoreFactory.py +116 -0
- naas_abi_core/services/triple_store/TripleStorePorts.py +223 -0
- naas_abi_core/services/triple_store/TripleStoreService.py +419 -0
- naas_abi_core/services/triple_store/adaptors/secondary/AWSNeptune.py +1300 -0
- naas_abi_core/services/triple_store/adaptors/secondary/AWSNeptune_test.py +284 -0
- naas_abi_core/services/triple_store/adaptors/secondary/Oxigraph.py +597 -0
- naas_abi_core/services/triple_store/adaptors/secondary/Oxigraph_test.py +1474 -0
- naas_abi_core/services/triple_store/adaptors/secondary/TripleStoreService__SecondaryAdaptor__Filesystem.py +223 -0
- naas_abi_core/services/triple_store/adaptors/secondary/TripleStoreService__SecondaryAdaptor__ObjectStorage.py +234 -0
- naas_abi_core/services/triple_store/adaptors/secondary/base/TripleStoreService__SecondaryAdaptor__FileBase.py +18 -0
- naas_abi_core/services/vector_store/IVectorStorePort.py +101 -0
- naas_abi_core/services/vector_store/IVectorStorePort_test.py +189 -0
- naas_abi_core/services/vector_store/VectorStoreFactory.py +47 -0
- naas_abi_core/services/vector_store/VectorStoreService.py +171 -0
- naas_abi_core/services/vector_store/VectorStoreService_test.py +185 -0
- naas_abi_core/services/vector_store/__init__.py +13 -0
- naas_abi_core/services/vector_store/adapters/QdrantAdapter.py +251 -0
- naas_abi_core/services/vector_store/adapters/QdrantAdapter_test.py +57 -0
- naas_abi_core/tests/test_services_imports.py +69 -0
- naas_abi_core/utils/Expose.py +55 -0
- naas_abi_core/utils/Graph.py +182 -0
- naas_abi_core/utils/JSON.py +49 -0
- naas_abi_core/utils/LazyLoader.py +44 -0
- naas_abi_core/utils/Logger.py +12 -0
- naas_abi_core/utils/OntologyReasoner.py +141 -0
- naas_abi_core/utils/OntologyYaml.py +681 -0
- naas_abi_core/utils/SPARQL.py +256 -0
- naas_abi_core/utils/Storage.py +33 -0
- naas_abi_core/utils/StorageUtils.py +398 -0
- naas_abi_core/utils/String.py +52 -0
- naas_abi_core/utils/Workers.py +114 -0
- naas_abi_core/utils/__init__.py +0 -0
- naas_abi_core/utils/onto2py/README.md +0 -0
- naas_abi_core/utils/onto2py/__init__.py +10 -0
- naas_abi_core/utils/onto2py/__main__.py +29 -0
- naas_abi_core/utils/onto2py/onto2py.py +611 -0
- naas_abi_core/utils/onto2py/tests/ttl2py_test.py +271 -0
- naas_abi_core/workflow/__init__.py +5 -0
- naas_abi_core/workflow/workflow.py +48 -0
- naas_abi_core-1.4.1.dist-info/METADATA +630 -0
- naas_abi_core-1.4.1.dist-info/RECORD +124 -0
- naas_abi_core-1.4.1.dist-info/WHEEL +4 -0
- naas_abi_core-1.4.1.dist-info/entry_points.txt +2 -0
naas_abi_core/services/triple_store/TripleStoreService.py (new file)
@@ -0,0 +1,419 @@
import base64
import hashlib
import io
import os
import uuid
from typing import Callable, List, Tuple

import pydash
import rdflib
from naas_abi_core import logger
from naas_abi_core.services.triple_store.TripleStorePorts import (
    ITripleStorePort,
    ITripleStoreService,
    OntologyEvent,
)
from naas_abi_core.utils.Workers import Job, WorkerPool
from rdflib import RDF, Graph, URIRef

SCHEMA_TTL = """
@prefix internal: <http://triple-store.internal#> .
@prefix owl: <http://www.w3.org/2002/07/owl#> .
@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
@prefix xsd: <http://www.w3.org/2001/XMLSchema#> .

internal:Schema a owl:Class ;
    rdfs:label "Schema" ;
    rdfs:comment "Represents a schema file that has been loaded into the triple store" .

internal:hash a owl:DatatypeProperty ;
    rdfs:domain internal:Schema ;
    rdfs:range xsd:string ;
    rdfs:label "hash" ;
    rdfs:comment "SHA-256 hash of the schema content" .

internal:fileLastUpdateTime a owl:DatatypeProperty ;
    rdfs:domain internal:Schema ;
    rdfs:range xsd:dateTime ;
    rdfs:label "file last update time" ;
    rdfs:comment "Last modification timestamp of the schema file" .

internal:filePath a owl:DatatypeProperty ;
    rdfs:domain internal:Schema ;
    rdfs:range xsd:string ;
    rdfs:label "file path" ;
    rdfs:comment "Path to the schema file" .

internal:content a owl:DatatypeProperty ;
    rdfs:domain internal:Schema ;
    rdfs:range xsd:base64Binary ;
    rdfs:label "content" ;
    rdfs:comment "Base64 encoded content of the schema file" .
"""


class TripleStoreService(ITripleStoreService):
    """TripleStoreService provides CRUD operations and SPARQL querying capabilities for ontologies.

    This service acts as a facade for ontology storage and retrieval operations. It handles storing,
    retrieving, merging and querying of RDF ontologies while providing optional filtering of
    non-named individuals.

    Attributes:
        __triple_store_adapter (ITripleStorePort): The storage adapter implementation used for
            persisting and retrieving ontologies.

    Example:
        >>> store = TripleStoreService(FileSystemTripleStore("ontologies/"))
        >>> ontology = Graph()
        >>> # ... populate ontology ...
        >>> store.store("my_ontology", ontology)
        >>> results = store.query("SELECT ?s WHERE { ?s a owl:Class }")
    """

    def __init__(
        self,
        triple_store_adapter: ITripleStorePort,
        views: List[Tuple[URIRef | None, URIRef | None, URIRef | None]] = [
            (None, RDF.type, None)
        ],
        trigger_worker_pool_size: int = 10,
    ):
        self.__triple_store_adapter = triple_store_adapter
        self.__event_listeners = {}
        self.__views: List[Tuple[URIRef | None, URIRef | None, URIRef | None]] = views

        self.__trigger_worker_pool = WorkerPool(trigger_worker_pool_size)

        # Load SCHEMA_TTL in IOBuffer
        schema_ttl_buffer = io.StringIO(SCHEMA_TTL)
        self.insert(Graph().parse(schema_ttl_buffer, format="turtle"))

        self.init_views()

    def __del__(self):
        self.__trigger_worker_pool.shutdown()

    def init_views(self):
        for view in self.__views:
            self.subscribe(
                view,
                OntologyEvent.INSERT,
                lambda event, triple: self.__triple_store_adapter.handle_view_event(
                    view, event, triple
                ),
            )
            self.subscribe(
                view,
                OntologyEvent.DELETE,
                lambda event, triple: self.__triple_store_adapter.handle_view_event(
                    view, event, triple
                ),
            )

    def insert(self, triples: Graph):
        # Insert the triples into the store
        self.__triple_store_adapter.insert(triples)

        # Notify listeners of the insert
        for s, p, o in triples.triples((None, None, None)):
            for ss, sp, so in self.__event_listeners:
                if (
                    (ss is None or str(ss) == str(s))
                    and (sp is None or str(sp) == str(p))
                    and (so is None or str(so) == str(o))
                ):
                    if OntologyEvent.INSERT in self.__event_listeners[ss, sp, so]:
                        for _, callback, background in self.__event_listeners[
                            ss, sp, so
                        ][OntologyEvent.INSERT]:
                            if background:
                                self.__trigger_worker_pool.submit(
                                    Job(None, callback, OntologyEvent.INSERT, (s, p, o))
                                )
                            else:
                                callback(OntologyEvent.INSERT, (s, p, o))

    def remove(self, triples: Graph):
        # Remove the triples from the store
        self.__triple_store_adapter.remove(triples)

        # Notify listeners of the delete
        for s, p, o in triples.triples((None, None, None)):
            for ss, sp, so in self.__event_listeners:
                if (
                    (ss is None or str(ss) == str(s))
                    and (sp is None or str(sp) == str(p))
                    and (so is None or str(so) == str(o))
                ):
                    if OntologyEvent.DELETE in self.__event_listeners[ss, sp, so]:
                        for _, callback, background in self.__event_listeners[
                            ss, sp, so
                        ][OntologyEvent.DELETE]:
                            if background:
                                self.__trigger_worker_pool.submit(
                                    Job(None, callback, OntologyEvent.DELETE, (s, p, o))
                                )
                            else:
                                callback(OntologyEvent.DELETE, (s, p, o))

    def get(self) -> Graph:
        return self.__triple_store_adapter.get()

    def query(self, query: str) -> rdflib.query.Result:
        return self.__triple_store_adapter.query(query)

    def query_view(self, view: str, query: str) -> rdflib.query.Result:
        return self.__triple_store_adapter.query_view(view, query)

    def subscribe(
        self,
        topic: tuple,
        event_type: OntologyEvent,
        callback: Callable[[OntologyEvent, Tuple[str, str, str]], None],
        background: bool = False,
    ) -> str:
        if topic not in self.__event_listeners:
            self.__event_listeners[topic] = {}
        if event_type not in self.__event_listeners[topic]:
            self.__event_listeners[topic][event_type] = []

        subscription_id = str(uuid.uuid4())

        self.__event_listeners[topic][event_type].append(
            (subscription_id, callback, background)
        )

        return subscription_id

    def unsubscribe(self, subscription_id: str) -> None:
        for topic in self.__event_listeners:
            for event_type in self.__event_listeners[topic]:
                self.__event_listeners[topic][event_type] = pydash.filter_(
                    self.__event_listeners[topic][event_type],
                    lambda x: x[0] != subscription_id,
                )

    def get_subject_graph(self, subject: str) -> Graph:
        return self.__triple_store_adapter.get_subject_graph(URIRef(subject))

    ###################lib/abi/services/ontology/OntologyService.py#########################################
    # Schema Management
    ############################################################

    def load_schemas(self, filepaths: List[str]):
        # First build a cache of all schemas to speed up the process.
        schema_cache = Graph()

        results = self.query("""
            PREFIX internal: <http://triple-store.internal#>
            SELECT ?schema ?filePath ?hash ?fileLastUpdateTime ?content
            WHERE {
                ?schema a internal:Schema ;
                    internal:filePath ?filePath ;
                    internal:hash ?hash ;
                    internal:fileLastUpdateTime ?fileLastUpdateTime ;
                    internal:content ?content .
            }
        """)

        for row in results:
            assert isinstance(row, rdflib.query.ResultRow)
            schema, filePath, hash, fileLastUpdateTime, content = row
            schema_cache.add(
                (schema, RDF.type, URIRef("http://triple-store.internal#Schema"))
            )
            schema_cache.add(
                (schema, URIRef("http://triple-store.internal#filePath"), filePath)
            )
            schema_cache.add(
                (schema, URIRef("http://triple-store.internal#hash"), hash)
            )
            schema_cache.add(
                (
                    schema,
                    URIRef("http://triple-store.internal#fileLastUpdateTime"),
                    fileLastUpdateTime,
                )
            )
            schema_cache.add(
                (schema, URIRef("http://triple-store.internal#content"), content)
            )

        for filepath in filepaths:
            self.load_schema(filepath, schema_cache)

    def load_schema(self, filepath: str, schema_cache: Graph | None = None):
        logger.debug(f"Loading schema: {filepath}")
        if schema_cache is not None:

            def _read_query_func(query: str):
                return schema_cache.query(query)

            read_query_func = _read_query_func
        else:
            read_query_func = self.query

        try:
            query = f'''PREFIX internal: <http://triple-store.internal#>
                SELECT * WHERE {{ ?s internal:filePath "{filepath}" . }}'''
            # logger.debug(f"Query: {query}")
            # Check if schema with filePath == filepath already exists and grab all triples.
            schema_triples: rdflib.query.Result = read_query_func(query)

            # logger.debug(f"len(list(schema_triples)): {len(list(schema_triples))}")
            # If schema with filePath == filepath already exists, we check if the file has been modified.
            schema_exists_in_store = len(list(schema_triples)) == 1
            logger.debug(f"Schema exists in store: {schema_exists_in_store}")
            if schema_exists_in_store:
                result_rows = list(schema_triples)
                assert len(result_rows) == 1
                assert isinstance(result_rows[0], rdflib.query.ResultRow)
                _SUBJECT_TUPLE_INDEX = 0
                subject = result_rows[0][_SUBJECT_TUPLE_INDEX]

                # Select * from subject
                triples: rdflib.query.Result = read_query_func(
                    f"""PREFIX internal: <http://triple-store.internal#>
                    SELECT ?p ?o WHERE {{ <{subject}> ?p ?o . }}"""
                )

                # Load schema into a dict
                schema_dict = {}
                for row in triples:
                    assert isinstance(row, rdflib.query.ResultRow)
                    p, o = row

                    schema_dict[str(p).replace("http://triple-store.internal#", "")] = (
                        str(o)
                    )

                # Get file last update time
                file_last_update_time = os.path.getmtime(filepath)

                # Open file and get content.
                with open(filepath, "r") as file:
                    new_content = file.read()

                new_content_hash = hashlib.sha256(
                    new_content.encode("utf-8")
                ).hexdigest()

                # If fileLastUpdateTime is the same, return. Otherwise we continue as we need to update the schema.
                if schema_dict["hash"] == new_content_hash:
                    logger.debug("Schema is up to date, no need to update.")
                    return

                logger.debug("Schema is not up to date, updating.")

                # Decode old content
                old_content = base64.b64decode(schema_dict["content"]).decode("utf-8")

                # Parse old and new schema
                old_schema = Graph().parse(io.StringIO(old_content), format="turtle")

                new_schema = Graph().parse(io.StringIO(new_content), format="turtle")

                # Compute addition and deletion triples
                addition_triples = new_schema - old_schema
                deletion_triples = old_schema - new_schema

                # Insert addition and remove deletion triples
                self.insert(addition_triples)
                self.remove(deletion_triples)

                # Update schema information in the triple store.

                self.remove(
                    Graph().parse(
                        io.StringIO(f'''
                            @prefix internal: <http://triple-store.internal#> .

                            <{subject}> internal:hash "{schema_dict["hash"]}" ;
                                internal:fileLastUpdateTime "{schema_dict["fileLastUpdateTime"]}" ;
                                internal:content "{schema_dict["content"]}" .
                        '''),
                        format="turtle",
                    )
                )

                self.insert(
                    Graph().parse(
                        io.StringIO(f'''
                            @prefix internal: <http://triple-store.internal#> .

                            <{subject}> internal:hash "{new_content_hash}" ;
                                internal:fileLastUpdateTime "{file_last_update_time}" ;
                                internal:content "{base64.b64encode(new_content.encode("utf-8")).decode("utf-8")}" .
                        '''),
                        format="turtle",
                    )
                )

                # Return as we don't need to continue as we have already updated the schema.
                return
            elif not schema_exists_in_store:
                logger.debug("Loading schema in graph as it doesn't exist in store.")

                # Open file and get content.
                with open(filepath, "r") as file:
                    content = file.read()

                # Compute base64 content
                base64_content = base64.b64encode(content.encode("utf-8")).decode(
                    "utf-8"
                )

                # Compute hash of content
                content_hash = hashlib.sha256(content.encode("utf-8")).hexdigest()

                # Parse schema
                g = Graph().parse(filepath)

                # Insert schema into the triple store
                self.insert(g)

                # Get file last update time
                file_last_update_time = os.path.getmtime(filepath)

                # Insert Schema with hash, filePath, fileLastUpdateTime and content to be able to track changes.
                self.insert(
                    Graph().parse(
                        io.StringIO(f'''
                            @prefix internal: <http://triple-store.internal#> .

                            <http://triple-store.internal/{uuid.uuid4()}> a internal:Schema ;
                                internal:hash "{content_hash}" ;
                                internal:filePath "{filepath}" ;
                                internal:fileLastUpdateTime "{file_last_update_time}" ;
                                internal:content "{base64_content}" .
                        '''),
                        format="turtle",
                    )
                )
        except Exception as e:
            logger.error(f"Error loading schema ({filepath}): {e}")

    def get_schema_graph(self) -> Graph:
        contents: rdflib.query.Result = self.query(
            """PREFIX internal: <http://triple-store.internal#>
            SELECT ?s ?o WHERE { ?s internal:content ?o . }"""
        )

        graph = Graph()

        for row in contents:
            assert isinstance(row, rdflib.query.ResultRow)
            _, o = row

            g = Graph().parse(
                io.StringIO(base64.b64decode(o).decode("utf-8")), format="turtle"
            )

            graph += g

            for prefix, namespace in g.namespaces():
                graph.bind(prefix, namespace)

        return graph
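For orientation, the snippet below sketches how the event-subscription flow in TripleStoreService is meant to be used: register a callback for a triple pattern, then let insert() fan matching triples out to it. This is a minimal sketch rather than code from the package; `adapter` is a placeholder for any ITripleStorePort implementation (the wheel ships Oxigraph, filesystem, object-storage and AWS Neptune adapters, whose constructor signatures are not shown in this diff).

from rdflib import RDF, Graph, URIRef
from rdflib.namespace import OWL

from naas_abi_core.services.triple_store.TripleStorePorts import OntologyEvent
from naas_abi_core.services.triple_store.TripleStoreService import TripleStoreService

# `adapter` stands in for any ITripleStorePort implementation.
service = TripleStoreService(adapter)

def on_new_type(event, triple):
    # Called for every inserted triple matching the (None, RDF.type, None) pattern.
    s, _, o = triple
    print(f"{event}: {s} declared as {o}")

# subscribe() returns an id that can later be passed to unsubscribe().
subscription_id = service.subscribe((None, RDF.type, None), OntologyEvent.INSERT, on_new_type)

g = Graph()
g.add((URIRef("http://example.org/Person"), RDF.type, OWL.Class))
service.insert(g)  # persists the triples and fires on_new_type synchronously (background=False)

service.unsubscribe(subscription_id)

Passing background=True to subscribe() would instead dispatch the callback through the service's WorkerPool, which keeps slow listeners from blocking insert() and remove().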