rdf4j-python 0.1.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- rdf4j_python-0.1.0/PKG-INFO +8 -0
- rdf4j_python-0.1.0/README.md +0 -0
- rdf4j_python-0.1.0/pyproject.toml +21 -0
- rdf4j_python-0.1.0/rdf4j_python/__init__.py +9 -0
- rdf4j_python-0.1.0/rdf4j_python/_client/__init__.py +3 -0
- rdf4j_python-0.1.0/rdf4j_python/_client/_client.py +102 -0
- rdf4j_python-0.1.0/rdf4j_python/_driver/__init__.py +7 -0
- rdf4j_python-0.1.0/rdf4j_python/_driver/_async_rdf4j_db.py +101 -0
- rdf4j_python-0.1.0/rdf4j_python/_driver/_async_repository.py +51 -0
- rdf4j_python-0.1.0/rdf4j_python/exception/repo_exception.py +4 -0
- rdf4j_python-0.1.0/rdf4j_python/model/_base_model.py +26 -0
- rdf4j_python-0.1.0/rdf4j_python/model/repository.py +41 -0
- rdf4j_python-0.1.0/rdf4j_python/utils/__init__.py +0 -0
- rdf4j_python-0.1.0/rdf4j_python/utils/const.py +26 -0
- rdf4j_python-0.1.0/rdf4j_python.egg-info/PKG-INFO +8 -0
- rdf4j_python-0.1.0/rdf4j_python.egg-info/SOURCES.txt +19 -0
- rdf4j_python-0.1.0/rdf4j_python.egg-info/dependency_links.txt +1 -0
- rdf4j_python-0.1.0/rdf4j_python.egg-info/requires.txt +2 -0
- rdf4j_python-0.1.0/rdf4j_python.egg-info/top_level.txt +1 -0
- rdf4j_python-0.1.0/setup.cfg +4 -0
- rdf4j_python-0.1.0/tests/test_repository.py +75 -0
|
File without changes
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
[project]
|
|
2
|
+
name = "rdf4j-python"
|
|
3
|
+
version = "0.1.0"
|
|
4
|
+
description = "The Python client for RDF4J"
|
|
5
|
+
readme = "README.md"
|
|
6
|
+
requires-python = ">=3.10"
|
|
7
|
+
dependencies = ["httpx>=0.28.1", "rdflib>=7.1.4"]
|
|
8
|
+
|
|
9
|
+
[dependency-groups]
|
|
10
|
+
dev = [
|
|
11
|
+
"pytest>=8.3.5",
|
|
12
|
+
"pytest-asyncio>=0.26.0",
|
|
13
|
+
"pytest-docker>=3.2.1",
|
|
14
|
+
"ruff>=0.11.8",
|
|
15
|
+
]
|
|
16
|
+
|
|
17
|
+
[tool.pytest.ini_options]
|
|
18
|
+
log_cli = true
|
|
19
|
+
log_cli_level = "INFO"
|
|
20
|
+
log_cli_format = "%(asctime)s [%(levelname)8s] %(message)s (%(filename)s:%(lineno)s)"
|
|
21
|
+
log_cli_date_format = "%Y-%m-%d %H:%M:%S"
|
|
@@ -0,0 +1,102 @@
|
|
|
1
|
+
from typing import Any, Dict, Optional
|
|
2
|
+
|
|
3
|
+
import httpx
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
class BaseClient:
    """Shared state for the HTTP API clients: a server base URL and a timeout."""

    def __init__(self, base_url: str, timeout: int = 10):
        # Normalize away any trailing slash so URL joining stays predictable.
        self.base_url = base_url.rstrip("/")
        self.timeout = timeout

    def _build_url(self, path: str) -> str:
        """Join *path* onto the base URL, tolerating a leading slash on *path*."""
        relative = path.lstrip("/")
        return f"{self.base_url}/{relative}"
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
class SyncApiClient(BaseClient):
    """Synchronous HTTP wrapper around ``httpx.Client``.

    Use as a context manager: the underlying client is created on
    ``__enter__`` and closed on ``__exit__``.
    """

    def __enter__(self):
        self.client = httpx.Client(timeout=self.timeout).__enter__()
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        self.client.__exit__(exc_type, exc_value, traceback)

    def get(
        self,
        path: str,
        params: Optional[Dict[str, Any]] = None,
        headers: Optional[Dict[str, str]] = None,
    ) -> httpx.Response:
        """Issue a GET request against *path*."""
        url = self._build_url(path)
        return self.client.get(url, params=params, headers=headers)

    def post(
        self,
        path: str,
        data: Optional[Dict[str, Any]] = None,
        json: Optional[Any] = None,
        headers: Optional[Dict[str, str]] = None,
    ) -> httpx.Response:
        """Issue a POST request with an optional form *data* or *json* body."""
        url = self._build_url(path)
        return self.client.post(url, data=data, json=json, headers=headers)

    def put(
        self,
        path: str,
        data: Optional[Dict[str, Any]] = None,
        json: Optional[Any] = None,
        headers: Optional[Dict[str, str]] = None,
    ) -> httpx.Response:
        """Issue a PUT request with an optional form *data* or *json* body."""
        url = self._build_url(path)
        return self.client.put(url, data=data, json=json, headers=headers)

    def delete(
        self, path: str, headers: Optional[Dict[str, str]] = None
    ) -> httpx.Response:
        """Issue a DELETE request against *path*."""
        url = self._build_url(path)
        return self.client.delete(url, headers=headers)
|
|
57
|
+
|
|
58
|
+
|
|
59
|
+
class AsyncApiClient(BaseClient):
    """Asynchronous HTTP wrapper around ``httpx.AsyncClient``.

    Use as an async context manager: the underlying client is created on
    ``__aenter__`` and closed on ``__aexit__``.
    """

    async def __aenter__(self):
        self.client = await httpx.AsyncClient(timeout=self.timeout).__aenter__()
        return self

    async def __aexit__(self, exc_type, exc_value, traceback):
        await self.client.__aexit__(exc_type, exc_value, traceback)

    async def get(
        self,
        path: str,
        params: Optional[Dict[str, Any]] = None,
        headers: Optional[Dict[str, str]] = None,
    ) -> httpx.Response:
        """Issue a GET request against *path*."""
        return await self.client.get(
            self._build_url(path), params=params, headers=headers
        )

    async def post(
        self,
        path: str,
        data: Optional[Dict[str, Any]] = None,
        json: Optional[Any] = None,
        headers: Optional[Dict[str, str]] = None,
    ) -> httpx.Response:
        """Issue a POST request with an optional form *data* or *json* body."""
        return await self.client.post(
            self._build_url(path), data=data, json=json, headers=headers
        )

    async def put(
        self,
        path: str,
        content: Optional[bytes] = None,
        json: Optional[Any] = None,
        headers: Optional[Dict[str, str]] = None,
        data: Optional[Dict[str, Any]] = None,
    ) -> httpx.Response:
        """Issue a PUT request.

        Accepts a raw ``content`` body, a ``json`` body, or form ``data``.
        ``data`` is added (keyword-only in practice, defaulted for backward
        compatibility) because callers such as ``AsyncRepository`` pass
        ``data=`` and ``SyncApiClient.put`` already accepts it.
        """
        return await self.client.put(
            self._build_url(path),
            content=content,
            data=data,
            json=json,
            headers=headers,
        )

    async def delete(
        self, path: str, headers: Optional[Dict[str, str]] = None
    ) -> httpx.Response:
        """Issue a DELETE request against *path*."""
        return await self.client.delete(self._build_url(path), headers=headers)
|
|
@@ -0,0 +1,101 @@
|
|
|
1
|
+
from typing import Union
|
|
2
|
+
|
|
3
|
+
import httpx
|
|
4
|
+
import rdflib
|
|
5
|
+
|
|
6
|
+
from rdf4j_python import AsyncApiClient
|
|
7
|
+
from rdf4j_python.exception.repo_exception import (
|
|
8
|
+
RepositoryCreationException,
|
|
9
|
+
RepositoryDeletionException,
|
|
10
|
+
)
|
|
11
|
+
from rdf4j_python.model.repository import RepositoryInfo
|
|
12
|
+
from rdf4j_python.utils.const import Rdf4jContentType
|
|
13
|
+
|
|
14
|
+
from ._async_repository import AsyncRepository
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
class AsyncRdf4jDB:
    """Asynchronous entry point to an RDF4J server's REST API.

    Use as an async context manager so the underlying HTTP client is
    opened and closed properly.
    """

    _client: AsyncApiClient
    _base_url: str

    def __init__(self, base_url: str):
        # Strip a trailing slash so path joining stays predictable.
        self._base_url = base_url.rstrip("/")

    async def __aenter__(self):
        self._client = await AsyncApiClient(base_url=self._base_url).__aenter__()
        return self

    async def __aexit__(self, exc_type, exc_value, traceback):
        await self._client.__aexit__(exc_type, exc_value, traceback)

    async def get_protocol_version(self) -> str:
        """Return the RDF4J REST protocol version reported by the server.

        :raises httpx.HTTPStatusError: if the server responds with an error.
        """
        response = await self._client.get("/protocol")
        response.raise_for_status()
        return response.text

    async def list_repositories(self) -> list[RepositoryInfo]:
        """
        List all RDF4J repositories.

        :return: List of repository information.
        :raises httpx.HTTPStatusError: if the server responds with an error.
        """
        response = await self._client.get(
            "/repositories",
            headers={"Accept": Rdf4jContentType.SPARQL_RESULTS_JSON},
        )
        # Fail fast on an HTTP error instead of handing an error body to the
        # SPARQL-results parser, which would raise an obscure parse failure.
        response.raise_for_status()
        # NOTE(review): the httpx response object is passed directly as the
        # parse source — rdflib only needs a ``read()``-able source, which
        # the response provides; confirm against the rdflib version in use.
        result = rdflib.query.Result.parse(
            response, format=Rdf4jContentType.SPARQL_RESULTS_JSON
        )

        return [
            RepositoryInfo.from_rdflib_binding(binding) for binding in result.bindings
        ]

    async def get_repository(self, repository_id: str) -> AsyncRepository:
        """
        Get an AsyncRepository instance for the specified repository ID.

        :param repository_id: The ID of the repository.
        :return: An instance of AsyncRepository.
        """
        return AsyncRepository(self._client, repository_id)

    async def create_repository(
        self,
        repository_id: str,
        rdf_config_data: str,
        content_type: Union[Rdf4jContentType, str] = Rdf4jContentType.TURTLE,
    ):
        """
        Create a new RDF4J repository.

        :param repository_id: Repository ID to create.
        :param rdf_config_data: RDF config in Turtle, RDF/XML, etc.
        :param content_type: MIME type of RDF config.
        :raises RepositoryCreationException: if the server does not answer 204.
        """
        path = f"/repositories/{repository_id}"

        if isinstance(content_type, Rdf4jContentType):
            content_type = content_type.value
        headers = {"Content-Type": content_type}

        response: httpx.Response = await self._client.put(
            path, content=rdf_config_data, headers=headers
        )
        # RDF4J answers 204 No Content on a successful PUT.
        if response.status_code != httpx.codes.NO_CONTENT:
            raise RepositoryCreationException(
                f"Repository creation failed: {response.status_code} - {response.text}"
            )

    async def delete_repository(self, repository_id: str):
        """
        Delete an RDF4J repository and its data/config.

        :param repository_id: The repository ID to delete.
        :raises RepositoryDeletionException: if the server does not answer 204.
        """
        path = f"/repositories/{repository_id}"
        response = await self._client.delete(path)
        if response.status_code != httpx.codes.NO_CONTENT:
            raise RepositoryDeletionException(
                f"Failed to delete repository '{repository_id}': {response.status_code} - {response.text}"
            )
|
|
@@ -0,0 +1,51 @@
|
|
|
1
|
+
from rdf4j_python import AsyncApiClient
|
|
2
|
+
from rdf4j_python.utils.const import Rdf4jContentType
|
|
3
|
+
|
|
4
|
+
|
|
5
|
+
class AsyncRepository:
    """Asynchronous operations against a single RDF4J repository."""

    def __init__(self, client: AsyncApiClient, repository_id: str):
        self._client = client
        self._repository_id = repository_id

    async def query(
        self,
        sparql_query: str,
        infer: bool = True,
        accept: Rdf4jContentType = Rdf4jContentType.SPARQL_RESULTS_JSON,
    ):
        """Run a SPARQL query against the repository.

        :param sparql_query: The SPARQL query string.
        :param infer: Whether to include inferred statements.
        :param accept: Desired response content type.
        :return: Parsed JSON when the server replies with a JSON content
            type, otherwise the raw response text.
        """
        path = f"/repositories/{self._repository_id}"
        # RDF4J expects infer as a lowercase "true"/"false" query parameter.
        params = {"query": sparql_query, "infer": str(infer).lower()}
        headers = {"Accept": accept.value}
        response = await self._client.get(path, params=params, headers=headers)
        if "json" in response.headers.get("Content-Type", ""):
            return response.json()
        return response.text

    async def update(self, sparql_update: str):
        """Execute a SPARQL UPDATE against the repository.

        :raises httpx.HTTPStatusError: on a non-success response.
        """
        path = f"/repositories/{self._repository_id}/statements"
        headers = {"Content-Type": Rdf4jContentType.SPARQL_UPDATE.value}
        # NOTE(review): the update string is sent via ``data=``; httpx treats
        # a plain string passed as data as a raw body (with a deprecation
        # warning in newer versions, which prefer ``content=``) — verify
        # against the httpx version in use.
        response = await self._client.post(path, data=sparql_update, headers=headers)
        response.raise_for_status()

    async def replace_statements(
        self, rdf_data: str, content_type: Rdf4jContentType = Rdf4jContentType.TURTLE
    ):
        """Replace all statements in the repository with *rdf_data*.

        :param rdf_data: Serialized RDF payload.
        :param content_type: MIME type of the payload.
        :raises httpx.HTTPStatusError: on a non-success response.
        """
        path = f"/repositories/{self._repository_id}/statements"
        headers = {"Content-Type": content_type.value}
        # Bug fix: AsyncApiClient.put has no ``data`` parameter — passing
        # data= raised TypeError. The raw payload goes through ``content``.
        response = await self._client.put(path, content=rdf_data, headers=headers)
        response.raise_for_status()

    async def get_namespaces(self):
        """Fetch the repository's declared namespaces.

        :return: Parsed JSON when the server replies with the SPARQL-results
            JSON content type, otherwise the raw response text.
        """
        path = f"/repositories/{self._repository_id}/namespaces"
        response = await self._client.get(path)
        if Rdf4jContentType.SPARQL_RESULTS_JSON in response.headers.get(
            "Content-Type", ""
        ):
            return response.json()
        return response.text

    async def set_namespace(self, prefix: str, namespace: str):
        """Bind *prefix* to *namespace* in the repository.

        :raises httpx.HTTPStatusError: on a non-success response.
        """
        path = f"/repositories/{self._repository_id}/namespaces/{prefix}"
        # The namespace IRI is sent as a plain-text body; NTRIPLES's value
        # is the "text/plain" MIME type.
        headers = {"Content-Type": Rdf4jContentType.NTRIPLES.value}
        # Bug fix: pass the body as ``content`` — ``data`` is not a
        # parameter of AsyncApiClient.put and raised TypeError.
        response = await self._client.put(path, content=namespace, headers=headers)
        response.raise_for_status()
|
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
from abc import ABC
|
|
2
|
+
from typing import Mapping, Optional
|
|
3
|
+
|
|
4
|
+
from rdflib.term import Identifier, Literal, URIRef, Variable
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
class _BaseModel(ABC):
    """Shared helpers for building model objects from SPARQL result bindings."""

    @staticmethod
    def get_literal(
        result: Mapping[Variable, Identifier],
        var_name: str,
        default: Optional[str] = None,
    ):
        """Return the Python value of the Literal bound to *var_name*, or *default*."""
        value = result.get(Variable(var_name))
        if isinstance(value, Literal):
            return value.toPython()
        return default

    @staticmethod
    def get_uri(
        result: Mapping[Variable, Identifier],
        var_name: str,
        default: Optional[str] = None,
    ):
        """Return the URIRef bound to *var_name* as a string, or *default*."""
        value = result.get(Variable(var_name))
        if isinstance(value, URIRef):
            return str(value)
        return default
|
|
@@ -0,0 +1,41 @@
|
|
|
1
|
+
from dataclasses import dataclass
|
|
2
|
+
from typing import Mapping
|
|
3
|
+
|
|
4
|
+
from rdflib.term import Identifier, Variable
|
|
5
|
+
|
|
6
|
+
from ._base_model import _BaseModel
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
@dataclass
class RepositoryInfo(_BaseModel):
    """
    Repository metadata as reported by an RDF4J repository listing.
    """

    id: str  # the repository identifier
    uri: str  # the full URI to the repository
    title: str  # a human-readable title
    readable: bool  # whether the repository is readable
    writable: bool  # whether the repository is writable

    def __str__(self):
        # Compact, human-friendly rendering for printing.
        return f"Repository(id={self.id}, title={self.title}, uri={self.uri})"

    @classmethod
    def from_rdflib_binding(
        cls, result: Mapping[Variable, Identifier]
    ) -> "RepositoryInfo":
        """
        Build a RepositoryInfo from one SPARQL query result binding,
        given as a Mapping from rdflib Variables to Identifiers.
        """
        # Helpers are inherited static methods, so call them via cls.
        return cls(
            id=cls.get_literal(result, "id", ""),
            uri=cls.get_uri(result, "uri", ""),
            title=cls.get_literal(result, "title", ""),
            readable=cls.get_literal(result, "readable", False),
            writable=cls.get_literal(result, "writable", False),
        )
|
|
File without changes
|
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
from enum import Enum
|
|
2
|
+
|
|
3
|
+
|
|
4
|
+
class Rdf4jContentType(str, Enum):
    """MIME content types used when talking to an RDF4J server.

    Subclasses ``str`` so members compare equal to, and can be used
    directly in place of, their plain string values (e.g. in HTTP headers).
    """

    # SPARQL Query Results
    SPARQL_RESULTS_JSON = "application/sparql-results+json"
    SPARQL_RESULTS_XML = "application/sparql-results+xml"
    SPARQL_RESULTS_BINARY = "application/x-binary-rdf-results-table"
    BOOLEAN_TEXT = "text/boolean"

    # RDF Serialization Formats
    RDF_XML = "application/rdf+xml"
    RDF_JSON = "application/rdf+json"
    LD_JSON = "application/ld+json"
    NTRIPLES = "text/plain"
    TURTLE = "text/turtle"
    N3 = "text/rdf+n3"
    NQUADS = "text/x-nquads"
    TRIG = "application/x-trig"
    TRIX = "application/trix"
    BINARY_RDF = "application/x-binary-rdf"

    # SPARQL-specific
    SPARQL_QUERY = "application/sparql-query"
    SPARQL_UPDATE = "application/sparql-update"
    FORM_URLENCODED = "application/x-www-form-urlencoded"
|
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
README.md
|
|
2
|
+
pyproject.toml
|
|
3
|
+
rdf4j_python/__init__.py
|
|
4
|
+
rdf4j_python.egg-info/PKG-INFO
|
|
5
|
+
rdf4j_python.egg-info/SOURCES.txt
|
|
6
|
+
rdf4j_python.egg-info/dependency_links.txt
|
|
7
|
+
rdf4j_python.egg-info/requires.txt
|
|
8
|
+
rdf4j_python.egg-info/top_level.txt
|
|
9
|
+
rdf4j_python/_client/__init__.py
|
|
10
|
+
rdf4j_python/_client/_client.py
|
|
11
|
+
rdf4j_python/_driver/__init__.py
|
|
12
|
+
rdf4j_python/_driver/_async_rdf4j_db.py
|
|
13
|
+
rdf4j_python/_driver/_async_repository.py
|
|
14
|
+
rdf4j_python/exception/repo_exception.py
|
|
15
|
+
rdf4j_python/model/_base_model.py
|
|
16
|
+
rdf4j_python/model/repository.py
|
|
17
|
+
rdf4j_python/utils/__init__.py
|
|
18
|
+
rdf4j_python/utils/const.py
|
|
19
|
+
tests/test_repository.py
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
rdf4j_python
|
|
@@ -0,0 +1,75 @@
|
|
|
1
|
+
import pytest
|
|
2
|
+
|
|
3
|
+
from rdf4j_python import AsyncRdf4jDB
|
|
4
|
+
from rdf4j_python.utils.const import Rdf4jContentType
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
def get_repo_config(name: str):
    """Return a Turtle configuration for an in-memory RDF4J store named *name*.

    Bug fix: the config uses ``rdfs:label`` but previously never declared
    the ``rdfs:`` prefix, making the Turtle document invalid.
    """
    return f"""
@prefix config: <tag:rdf4j.org,2023:config/>.
@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#>.

[] a config:Repository ;
   config:rep.id "{name}" ;
   rdfs:label "{name}" ;
   config:rep.impl [
      config:rep.type "openrdf:SailRepository" ;
      config:sail.impl [
         config:sail.type "openrdf:MemoryStore" ;
      ]
   ] .
"""
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
@pytest.mark.asyncio
async def test_create_repo(rdf4j_service: str):
    """A created repository shows up in the listing with matching id/title."""
    repo_id = "test_create_repo"
    async with AsyncRdf4jDB(rdf4j_service) as db:
        await db.create_repository(
            repository_id=repo_id,
            rdf_config_data=get_repo_config(repo_id),
            content_type=Rdf4jContentType.TURTLE,
        )
        repos = await db.list_repositories()
        assert len(repos) == 1
        assert repos[0].id == repo_id
        assert repos[0].title == repo_id
        # Clean up so other tests see an empty server.
        await db.delete_repository(repo_id)
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
@pytest.mark.asyncio
async def test_delete_repo(rdf4j_service: str):
    """Deleting a repository removes it from the listing."""
    repo_id = "test_delete_repo"
    async with AsyncRdf4jDB(rdf4j_service) as db:
        await db.create_repository(
            repository_id=repo_id,
            rdf_config_data=get_repo_config(repo_id),
            content_type=Rdf4jContentType.TURTLE,
        )
        # Sanity check: the repository exists before deletion.
        repos = await db.list_repositories()
        assert len(repos) == 1
        assert repos[0].id == repo_id
        assert repos[0].title == repo_id
        await db.delete_repository(repo_id)
        # After deletion the server reports no repositories.
        repos = await db.list_repositories()
        assert len(repos) == 0
|
|
55
|
+
|
|
56
|
+
|
|
57
|
+
@pytest.mark.asyncio
async def test_list_repos(rdf4j_service: str):
    """Every created repository appears in the listing by id and title."""
    repo_count = 10
    async with AsyncRdf4jDB(rdf4j_service) as db:
        # The server starts empty.
        repos = await db.list_repositories()
        assert len(repos) == 0

        expected_ids = [f"test_list_repos_{i}" for i in range(repo_count)]
        for repo_id in expected_ids:
            await db.create_repository(
                repository_id=repo_id,
                rdf_config_data=get_repo_config(repo_id),
                content_type=Rdf4jContentType.TURTLE,
            )

        repos = await db.list_repositories()
        assert len(repos) == repo_count
        listed_ids = [r.id for r in repos]
        listed_titles = [r.title for r in repos]
        for repo_id in expected_ids:
            assert repo_id in listed_ids
            assert repo_id in listed_titles