rdf4j-python 0.1.3__tar.gz → 0.1.4__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {rdf4j_python-0.1.3/rdf4j_python.egg-info → rdf4j_python-0.1.4}/PKG-INFO +4 -1
- {rdf4j_python-0.1.3 → rdf4j_python-0.1.4}/README.md +1 -0
- {rdf4j_python-0.1.3 → rdf4j_python-0.1.4}/pyproject.toml +5 -1
- {rdf4j_python-0.1.3 → rdf4j_python-0.1.4}/rdf4j_python/_driver/_async_rdf4j_db.py +1 -0
- {rdf4j_python-0.1.3 → rdf4j_python-0.1.4}/rdf4j_python/_driver/_async_repository.py +43 -12
- {rdf4j_python-0.1.3 → rdf4j_python-0.1.4}/rdf4j_python/model/repository_config.py +5 -5
- {rdf4j_python-0.1.3 → rdf4j_python-0.1.4/rdf4j_python.egg-info}/PKG-INFO +4 -1
- rdf4j_python-0.1.4/rdf4j_python.egg-info/requires.txt +5 -0
- {rdf4j_python-0.1.3 → rdf4j_python-0.1.4}/tests/test_rdf4j_repository.py +129 -0
- rdf4j_python-0.1.3/rdf4j_python.egg-info/requires.txt +0 -2
- {rdf4j_python-0.1.3 → rdf4j_python-0.1.4}/LICENSE +0 -0
- {rdf4j_python-0.1.3 → rdf4j_python-0.1.4}/rdf4j_python/__init__.py +0 -0
- {rdf4j_python-0.1.3 → rdf4j_python-0.1.4}/rdf4j_python/_client/__init__.py +0 -0
- {rdf4j_python-0.1.3 → rdf4j_python-0.1.4}/rdf4j_python/_client/_client.py +0 -0
- {rdf4j_python-0.1.3 → rdf4j_python-0.1.4}/rdf4j_python/_driver/__init__.py +0 -0
- {rdf4j_python-0.1.3 → rdf4j_python-0.1.4}/rdf4j_python/_driver/_async_named_graph.py +0 -0
- {rdf4j_python-0.1.3 → rdf4j_python-0.1.4}/rdf4j_python/exception/__init__.py +0 -0
- {rdf4j_python-0.1.3 → rdf4j_python-0.1.4}/rdf4j_python/exception/repo_exception.py +0 -0
- {rdf4j_python-0.1.3 → rdf4j_python-0.1.4}/rdf4j_python/model/__init__.py +0 -0
- {rdf4j_python-0.1.3 → rdf4j_python-0.1.4}/rdf4j_python/model/_namespace.py +0 -0
- {rdf4j_python-0.1.3 → rdf4j_python-0.1.4}/rdf4j_python/model/_repository_info.py +0 -0
- {rdf4j_python-0.1.3 → rdf4j_python-0.1.4}/rdf4j_python/model/term.py +0 -0
- {rdf4j_python-0.1.3 → rdf4j_python-0.1.4}/rdf4j_python/model/vocabulary.py +0 -0
- {rdf4j_python-0.1.3 → rdf4j_python-0.1.4}/rdf4j_python/utils/__init__.py +0 -0
- {rdf4j_python-0.1.3 → rdf4j_python-0.1.4}/rdf4j_python/utils/const.py +0 -0
- {rdf4j_python-0.1.3 → rdf4j_python-0.1.4}/rdf4j_python/utils/helpers.py +0 -0
- {rdf4j_python-0.1.3 → rdf4j_python-0.1.4}/rdf4j_python.egg-info/SOURCES.txt +0 -0
- {rdf4j_python-0.1.3 → rdf4j_python-0.1.4}/rdf4j_python.egg-info/dependency_links.txt +0 -0
- {rdf4j_python-0.1.3 → rdf4j_python-0.1.4}/rdf4j_python.egg-info/top_level.txt +0 -0
- {rdf4j_python-0.1.3 → rdf4j_python-0.1.4}/setup.cfg +0 -0
- {rdf4j_python-0.1.3 → rdf4j_python-0.1.4}/tests/test_async_named_graph.py +0 -0
- {rdf4j_python-0.1.3 → rdf4j_python-0.1.4}/tests/test_client.py +0 -0
- {rdf4j_python-0.1.3 → rdf4j_python-0.1.4}/tests/test_helpers.py +0 -0
--- rdf4j_python-0.1.3/rdf4j_python.egg-info/PKG-INFO
+++ rdf4j_python-0.1.4/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: rdf4j-python
-Version: 0.1.3
+Version: 0.1.4
 Summary: The Python client for RDF4J
 Author-email: Chengxu Bian <cbian564@gmail.com>
 Requires-Python: >=3.10
@@ -8,6 +8,8 @@ Description-Content-Type: text/markdown
 License-File: LICENSE
 Requires-Dist: httpx>=0.28.1
 Requires-Dist: pyoxigraph>=0.4.10
+Provides-Extra: sparqlwrapper
+Requires-Dist: sparqlwrapper>=2.0.0; extra == "sparqlwrapper"
 Dynamic: license-file
 
 # 🐍 rdf4j-python
@@ -60,6 +62,7 @@ async with AsyncRdf4j("http://localhost:19780/rdf4j-server") as db:
         Literal("test_object"),
     )
     await repo.get_statements(subject=IRI("http://example.com/subject"))
+    results = await repo.query("SELECT * WHERE { ?s ?p ?o }")
 ```
 
 For more detailed examples, refer to the [examples](https://github.com/odysa/rdf4j-python/tree/main/examples) directory.
--- rdf4j_python-0.1.3/README.md
+++ rdf4j_python-0.1.4/README.md
@@ -48,6 +48,7 @@ async with AsyncRdf4j("http://localhost:19780/rdf4j-server") as db:
         Literal("test_object"),
     )
     await repo.get_statements(subject=IRI("http://example.com/subject"))
+    results = await repo.query("SELECT * WHERE { ?s ?p ?o }")
 ```
 
 For more detailed examples, refer to the [examples](https://github.com/odysa/rdf4j-python/tree/main/examples) directory.
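The line added to the README quick-start snippet is the new `query` call. A minimal sketch of how its result might be consumed (the server URL and repository id are placeholders; a SELECT query yields an iterable of variable bindings, as the new tests below also show):

```python
import asyncio

from rdf4j_python import AsyncRdf4j


async def main() -> None:
    # Placeholder server URL and repository id; adjust for your deployment.
    async with AsyncRdf4j("http://localhost:19780/rdf4j-server") as db:
        repo = await db.get_repository("example-repo")
        results = await repo.query("SELECT * WHERE { ?s ?p ?o }")
        for solution in results:
            # Each solution maps SPARQL variable names to RDF terms.
            print(solution["s"], solution["p"], solution["o"])


asyncio.run(main())
```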
--- rdf4j_python-0.1.3/pyproject.toml
+++ rdf4j_python-0.1.4/pyproject.toml
@@ -1,18 +1,22 @@
 [project]
 name = "rdf4j-python"
 authors = [{ name = "Chengxu Bian", email = "cbian564@gmail.com" }]
-version = "0.1.3"
+version = "0.1.4"
 description = "The Python client for RDF4J"
 readme = "README.md"
 requires-python = ">=3.10"
 dependencies = ["httpx>=0.28.1", "pyoxigraph>=0.4.10"]
 
+[project.optional-dependencies]
+sparqlwrapper = ["sparqlwrapper>=2.0.0"]
+
 [dependency-groups]
 dev = [
     "pytest>=8.3.5",
     "pytest-asyncio>=0.26.0",
     "pytest-docker>=3.2.1",
     "ruff>=0.11.8",
+    "ty>=0.0.1a7",
 ]
 docs = ["furo>=2024.8.6", "sphinx>=8"]
 
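The new `[project.optional-dependencies]` table makes SPARQLWrapper an opt-in extra (installable with `pip install rdf4j-python[sparqlwrapper]`, the command quoted in the driver's error message below). A short sketch of how downstream code might detect the extra at runtime, mirroring the guarded import this release adds to `_async_repository.py`:

```python
# Sketch: detect whether the optional sparqlwrapper extra is installed,
# mirroring the try/except guard used in _async_repository.py below.
try:
    from SPARQLWrapper import SPARQLWrapper  # noqa: F401

    HAS_SPARQLWRAPPER = True
except ImportError:
    HAS_SPARQLWRAPPER = False

print("SPARQLWrapper available:", HAS_SPARQLWRAPPER)
```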
--- rdf4j_python-0.1.3/rdf4j_python/_driver/_async_rdf4j_db.py
+++ rdf4j_python-0.1.4/rdf4j_python/_driver/_async_rdf4j_db.py
@@ -69,6 +69,7 @@ class AsyncRdf4j:
         query_solutions = og.parse_query_results(
             response.text, format=og.QueryResultsFormat.JSON
         )
+        assert isinstance(query_solutions, og.QuerySolutions)
         return [
             RepositoryMetadata.from_sparql_query_solution(query_solution)
             for query_solution in query_solutions
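The added `assert` narrows the union that `og.parse_query_results` is typed to return: `QuerySolutions` for SELECT results versus `QueryBoolean` for ASK results. A small standalone sketch of that narrowing (the JSON payload is a made-up example):

```python
import pyoxigraph as og

# Hypothetical SPARQL JSON results payload with one variable and no rows.
raw = '{"head": {"vars": ["s"]}, "results": {"bindings": []}}'

parsed = og.parse_query_results(raw, format=og.QueryResultsFormat.JSON)
if isinstance(parsed, og.QuerySolutions):
    # SELECT-style results: iterate the variable bindings.
    for solution in parsed:
        print(solution["s"])
else:
    # ASK-style results arrive as a QueryBoolean instead.
    print(parsed)
```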
--- rdf4j_python-0.1.3/rdf4j_python/_driver/_async_repository.py
+++ rdf4j_python-0.1.4/rdf4j_python/_driver/_async_repository.py
@@ -25,10 +25,21 @@ from rdf4j_python.model.term import (
 from rdf4j_python.utils.const import Rdf4jContentType
 from rdf4j_python.utils.helpers import serialize_statements
 
+try:
+    from SPARQLWrapper import SPARQLWrapper
+
+    _has_sparql_wrapper = True
+except ImportError:
+    _has_sparql_wrapper = False
+
 
 class AsyncRdf4JRepository:
     """Asynchronous interface for interacting with an RDF4J repository."""
 
+    _client: AsyncApiClient
+    _repository_id: str
+    _sparql_wrapper: Optional["SPARQLWrapper"] = None
+
     def __init__(self, client: AsyncApiClient, repository_id: str):
         """Initializes the repository interface.
 
@@ -39,32 +50,47 @@ class AsyncRdf4JRepository:
         self._client = client
         self._repository_id = repository_id
 
+    async def get_sparql_wrapper(self) -> "SPARQLWrapper":
+        """Returns the SPARQLWrapper for the repository.
+
+        Returns:
+            SPARQLWrapper: The SPARQLWrapper for the repository.
+        """
+        if not _has_sparql_wrapper:
+            raise ImportError(
+                "SPARQLWrapper is not installed. Please install it with `pip install rdf4j-python[sparqlwrapper]`"
+            )
+
+        if self._sparql_wrapper is None:
+            self._sparql_wrapper = SPARQLWrapper(
+                f"{self._client.get_base_url()}/repositories/{self._repository_id}"
+            )
+        return self._sparql_wrapper
+
     async def query(
         self,
         sparql_query: str,
         infer: bool = True,
-
-    ):
+    ) -> og.QuerySolutions | og.QueryBoolean:
         """Executes a SPARQL SELECT query.
 
         Args:
             sparql_query (str): The SPARQL query string.
             infer (bool): Whether to include inferred statements. Defaults to True.
-            accept (Rdf4jContentType): The expected response format.
 
         Returns:
-
+            og.QuerySolutions | og.QueryBoolean: Parsed query results.
         """
         path = f"/repositories/{self._repository_id}"
         params = {"query": sparql_query, "infer": str(infer).lower()}
-        headers = {"Accept":
+        headers = {"Accept": Rdf4jContentType.SPARQL_RESULTS_JSON}
         response = await self._client.get(path, params=params, headers=headers)
         self._handle_repo_not_found_exception(response)
-
-        return response.json()
-        return response.text
+        return og.parse_query_results(response.text, format=og.QueryResultsFormat.JSON)
 
-    async def update(
+    async def update(
+        self, sparql_update_query: str, content_type: Rdf4jContentType
+    ) -> None:
         """Executes a SPARQL UPDATE command.
 
         Args:
@@ -74,11 +100,15 @@ class AsyncRdf4JRepository:
             RepositoryNotFoundException: If the repository doesn't exist.
             httpx.HTTPStatusError: If the update fails.
         """
+        # TODO: handle update results
         path = f"/repositories/{self._repository_id}/statements"
-        headers = {"Content-Type":
-        response = await self._client.post(
+        headers = {"Content-Type": content_type}
+        response = await self._client.post(
+            path, content=sparql_update_query, headers=headers
+        )
         self._handle_repo_not_found_exception(response)
-        response.
+        if response.status_code != httpx.codes.NO_CONTENT:
+            raise RepositoryUpdateException(f"Failed to update: {response.text}")
 
     async def get_namespaces(self):
         """Retrieves all namespaces in the repository.
@@ -97,6 +127,7 @@ class AsyncRdf4JRepository:
         query_solutions = og.parse_query_results(
             response.text, format=og.QueryResultsFormat.JSON
         )
+        assert isinstance(query_solutions, og.QuerySolutions)
         return [
             Namespace.from_sparql_query_solution(query_solution)
             for query_solution in query_solutions
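Taken together, the `_async_repository.py` hunks change `query` to return parsed pyoxigraph results, make `update` expect a 204 No Content reply (raising `RepositoryUpdateException` otherwise), and add an optional SPARQLWrapper accessor. A hedged usage sketch against a running server (URL and repository id are placeholders; `get_sparql_wrapper` assumes the `sparqlwrapper` extra is installed):

```python
import asyncio

from rdf4j_python import AsyncRdf4j
from rdf4j_python.exception.repo_exception import RepositoryUpdateException
from rdf4j_python.utils.const import Rdf4jContentType


async def main() -> None:
    # Placeholder server URL and repository id.
    async with AsyncRdf4j("http://localhost:19780/rdf4j-server") as db:
        repo = await db.get_repository("example-repo")

        # update() now raises RepositoryUpdateException unless the server
        # answers 204 No Content.
        try:
            await repo.update(
                'INSERT DATA { <http://example.org/s> <http://example.org/p> "o" }',
                Rdf4jContentType.SPARQL_UPDATE,
            )
        except RepositoryUpdateException as exc:
            print("update rejected:", exc)

        # query() now returns parsed pyoxigraph results rather than raw JSON text.
        solutions = await repo.query("SELECT * WHERE { ?s ?p ?o }")
        for solution in solutions:
            print(solution["s"], solution["o"])

        # Optional helper, available only when the sparqlwrapper extra is installed.
        wrapper = await repo.get_sparql_wrapper()
        print(wrapper)


asyncio.run(main())
```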
--- rdf4j_python-0.1.3/rdf4j_python/model/repository_config.py
+++ rdf4j_python-0.1.4/rdf4j_python/model/repository_config.py
@@ -59,12 +59,12 @@ class RepositoryConfig:
         """
         return self._title
 
-    def to_turtle(self) ->
+    def to_turtle(self) -> bytes | None:
         """
         Serializes the Repository configuration to Turtle syntax using .
 
         Returns:
-
+            bytes | None: A UTF-8 encoded Turtle string representing the RDF4J repository configuration.
             The serialization includes the repository ID, optional human-readable title,
             and nested repository implementation configuration if available.
 
@@ -909,7 +909,7 @@ class SchemaCachingRDFSInferencerConfig(SailConfig):
         """
         sail_node = super().add_to_graph(graph)
         delegate_node = self.config_params["delegate"].to_rdf(graph)
-        graph.add((sail_node, CONFIG.delegate, delegate_node))
+        graph.add(Quad(sail_node, CONFIG.delegate, delegate_node, None))
         return sail_node
 
     class Builder:
@@ -1002,7 +1002,7 @@ class DirectTypeHierarchyInferencerConfig(SailConfig):
         """
         sail_node = super().add_to_graph(graph)
         delegate_node = self.config_params["delegate"].to_rdf(graph)
-        graph.add((sail_node, CONFIG["delegate"], delegate_node))
+        graph.add(Quad(sail_node, CONFIG["delegate"], delegate_node, None))
         return sail_node
 
     class Builder:
@@ -1175,7 +1175,7 @@ class SHACLSailConfig(SailConfig):
         """
         sail_node = super().add_to_graph(graph)  # Get the basic node
         delegate_node = self.config_params["delegate"].to_rdf(graph)
-        graph.add((sail_node, CONFIG.delegate, delegate_node))
+        graph.add(Quad(sail_node, CONFIG.delegate, delegate_node, None))
 
         # Add SHACL-specific parameters
         for key, value in self.config_params.items():
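All three hunks make the same substitution: the delegate edge is now added as a `Quad` with an explicit `None` (default) graph name rather than a plain 3-tuple. A small pyoxigraph sketch of that calling convention (the predicate IRI is illustrative, not the actual `CONFIG.delegate` term):

```python
import pyoxigraph as og

# add() on a pyoxigraph store takes a Quad object, not a bare (s, p, o) tuple;
# a None graph name targets the default graph.
store = og.Store()  # in-memory store
sail_node = og.BlankNode()
delegate_node = og.BlankNode()
delegate = og.NamedNode("http://example.org/config#delegate")  # illustrative IRI

store.add(og.Quad(sail_node, delegate, delegate_node, None))
print(len(store))  # -> 1
```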
--- rdf4j_python-0.1.3/PKG-INFO
+++ rdf4j_python-0.1.4/rdf4j_python.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: rdf4j-python
-Version: 0.1.3
+Version: 0.1.4
 Summary: The Python client for RDF4J
 Author-email: Chengxu Bian <cbian564@gmail.com>
 Requires-Python: >=3.10
@@ -8,6 +8,8 @@ Description-Content-Type: text/markdown
 License-File: LICENSE
 Requires-Dist: httpx>=0.28.1
 Requires-Dist: pyoxigraph>=0.4.10
+Provides-Extra: sparqlwrapper
+Requires-Dist: sparqlwrapper>=2.0.0; extra == "sparqlwrapper"
 Dynamic: license-file
 
 # 🐍 rdf4j-python
@@ -60,6 +62,7 @@ async with AsyncRdf4j("http://localhost:19780/rdf4j-server") as db:
         Literal("test_object"),
     )
     await repo.get_statements(subject=IRI("http://example.com/subject"))
+    results = await repo.query("SELECT * WHERE { ?s ?p ?o }")
 ```
 
 For more detailed examples, refer to the [examples](https://github.com/odysa/rdf4j-python/tree/main/examples) directory.
--- rdf4j_python-0.1.3/tests/test_rdf4j_repository.py
+++ rdf4j_python-0.1.4/tests/test_rdf4j_repository.py
@@ -4,10 +4,12 @@ from rdf4j_python import AsyncRdf4JRepository
 from rdf4j_python.exception.repo_exception import (
     NamespaceException,
     RepositoryNotFoundException,
+    RepositoryUpdateException,
 )
 from rdf4j_python.model.term import Literal, Quad, QuadResultSet, Triple
 from rdf4j_python.model.vocabulary import EXAMPLE as ex
 from rdf4j_python.model.vocabulary import RDF, RDFS
+from rdf4j_python.utils.const import Rdf4jContentType
 
 ex_ns = ex.namespace
 rdf_ns = RDF.namespace
@@ -257,3 +259,130 @@ async def test_repo_replace_statements_contexts(mem_repo: AsyncRdf4JRepository):
     assert new_statement_2 in resultSet
     assert old_statement_1 not in resultSet
     assert old_statement_2 not in resultSet
+
+
+@pytest.mark.asyncio
+async def test_repo_query_simple_select(mem_repo: AsyncRdf4JRepository):
+    await mem_repo.add_statements(
+        [
+            Triple(ex["subject1"], ex["predicate"], Literal("test_object")),
+            Triple(ex["subject2"], ex["predicate"], Literal("test_object2")),
+        ]
+    )
+    result = await mem_repo.query("SELECT * WHERE { ?s ?p ?o }")
+    result_list = list(result)
+    assert len(result_list) == 2
+    assert result_list[0]["s"] == ex["subject1"]
+    assert result_list[0]["p"] == ex["predicate"]
+    assert result_list[0]["o"] == Literal("test_object")
+    assert result_list[1]["s"] == ex["subject2"]
+    assert result_list[1]["p"] == ex["predicate"]
+    assert result_list[1]["o"] == Literal("test_object2")
+
+
+@pytest.mark.asyncio
+async def test_repo_query_simple_select_with_filter(mem_repo: AsyncRdf4JRepository):
+    await mem_repo.add_statements(
+        [
+            Triple(ex["subject1"], ex["predicate"], Literal("test_object")),
+            Triple(ex["subject2"], ex["predicate"], Literal("test_object2")),
+        ]
+    )
+    result = await mem_repo.query(
+        "SELECT * WHERE { ?s ?p ?o FILTER(?o = 'test_object') }"
+    )
+    result_list = list(result)
+    assert len(result_list) == 1
+    assert result_list[0]["s"] == ex["subject1"]
+    assert result_list[0]["p"] == ex["predicate"]
+    assert result_list[0]["o"] == Literal("test_object")
+
+
+@pytest.mark.asyncio
+async def test_repo_group_by(mem_repo: AsyncRdf4JRepository):
+    await mem_repo.add_statements(
+        [
+            Triple(ex["subject1"], ex["predicate"], Literal("test_object")),
+            Triple(ex["subject2"], ex["predicate"], Literal("test_object2")),
+        ]
+    )
+    result = await mem_repo.query(
+        "SELECT ?s (COUNT(?p) AS ?count) WHERE { ?s ?p ?o } GROUP BY ?s"
+    )
+    result_list = list(result)
+    assert len(result_list) == 2
+    assert result_list[0]["count"] == Literal(1)
+    assert result_list[1]["count"] == Literal(1)
+
+
+@pytest.mark.asyncio
+async def test_repo_query_with_order_by(mem_repo: AsyncRdf4JRepository):
+    await mem_repo.add_statements(
+        [
+            Triple(ex["subject3"], ex["predicate"], Literal("test_object3")),
+            Triple(ex["subject1"], ex["predicate"], Literal("test_object1")),
+            Triple(ex["subject2"], ex["predicate"], Literal("test_object2")),
+        ]
+    )
+    result = await mem_repo.query("SELECT * WHERE { ?s ?p ?o } ORDER BY ?s")
+    result_list = list(result)
+    assert len(result_list) == 3
+    assert result_list[0]["s"] == ex["subject1"]
+    assert result_list[1]["s"] == ex["subject2"]
+    assert result_list[2]["s"] == ex["subject3"]
+
+
+@pytest.mark.asyncio
+async def test_repo_query_with_limit(mem_repo: AsyncRdf4JRepository):
+    await mem_repo.add_statements(
+        [
+            Triple(ex["subject1"], ex["predicate"], Literal("test_object1")),
+            Triple(ex["subject2"], ex["predicate"], Literal("test_object2")),
+            Triple(ex["subject3"], ex["predicate"], Literal("test_object3")),
+        ]
+    )
+    result = await mem_repo.query("SELECT * WHERE { ?s ?p ?o } LIMIT 2")
+    result_list = list(result)
+    assert len(result_list) == 2
+    assert result_list[0]["s"] == ex["subject1"]
+    assert result_list[0]["p"] == ex["predicate"]
+    assert result_list[0]["o"] == Literal("test_object1")
+    assert result_list[1]["s"] == ex["subject2"]
+    assert result_list[1]["p"] == ex["predicate"]
+    assert result_list[1]["o"] == Literal("test_object2")
+
+
+@pytest.mark.asyncio
+async def test_repo_update(mem_repo: AsyncRdf4JRepository):
+    await mem_repo.update(
+        'INSERT DATA { <http://example.org/subject1> <http://example.org/predicate> "test_object1" }',
+        Rdf4jContentType.SPARQL_UPDATE,
+    )
+    result = await mem_repo.query("SELECT * WHERE { ?s ?p ?o }")
+    result_list = list(result)
+    assert len(result_list) == 1
+    assert result_list[0]["s"] == ex["subject1"]
+    assert result_list[0]["p"] == ex["predicate"]
+    assert result_list[0]["o"] == Literal("test_object1")
+
+
+@pytest.mark.asyncio
+async def test_repo_update_not_found(rdf4j_service: str):
+    from rdf4j_python import AsyncRdf4j
+
+    async with AsyncRdf4j(rdf4j_service) as db:
+        repo = await db.get_repository("not_found")
+        with pytest.raises(RepositoryNotFoundException):
+            await repo.update(
+                "INSERT DATA { <http://example.org/subject1> <http://example.org/predicate> 'test_object1' }",
+                Rdf4jContentType.SPARQL_UPDATE,
+            )
+
+
+@pytest.mark.asyncio
+async def test_repo_update_invalid_query(mem_repo: AsyncRdf4JRepository):
+    with pytest.raises(RepositoryUpdateException):
+        await mem_repo.update(
+            "INSERT D <http://example.org/subject1> <http://example.org/predicate> 'test_object1' }",
+            Rdf4jContentType.SPARQL_UPDATE,
+        )

All remaining files listed above are unchanged between 0.1.3 and 0.1.4.