cmem-client 0.5.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- cmem_client/__init__.py +13 -0
- cmem_client/auth_provider/__init__.py +14 -0
- cmem_client/auth_provider/abc.py +124 -0
- cmem_client/auth_provider/client_credentials.py +207 -0
- cmem_client/auth_provider/password.py +252 -0
- cmem_client/auth_provider/prefetched_token.py +153 -0
- cmem_client/client.py +485 -0
- cmem_client/components/__init__.py +10 -0
- cmem_client/components/graph_store.py +316 -0
- cmem_client/components/marketplace.py +179 -0
- cmem_client/components/sparql_wrapper.py +53 -0
- cmem_client/components/workspace.py +194 -0
- cmem_client/config.py +364 -0
- cmem_client/exceptions.py +82 -0
- cmem_client/logging_utils.py +49 -0
- cmem_client/models/__init__.py +16 -0
- cmem_client/models/access_condition.py +147 -0
- cmem_client/models/base.py +30 -0
- cmem_client/models/dataset.py +32 -0
- cmem_client/models/error.py +67 -0
- cmem_client/models/graph.py +26 -0
- cmem_client/models/item.py +143 -0
- cmem_client/models/logging_config.py +51 -0
- cmem_client/models/package.py +35 -0
- cmem_client/models/project.py +46 -0
- cmem_client/models/python_package.py +26 -0
- cmem_client/models/token.py +40 -0
- cmem_client/models/url.py +34 -0
- cmem_client/models/workflow.py +80 -0
- cmem_client/repositories/__init__.py +15 -0
- cmem_client/repositories/access_conditions.py +62 -0
- cmem_client/repositories/base/__init__.py +12 -0
- cmem_client/repositories/base/abc.py +138 -0
- cmem_client/repositories/base/paged_list.py +63 -0
- cmem_client/repositories/base/plain_list.py +39 -0
- cmem_client/repositories/base/task_search.py +70 -0
- cmem_client/repositories/datasets.py +36 -0
- cmem_client/repositories/graph_imports.py +93 -0
- cmem_client/repositories/graphs.py +458 -0
- cmem_client/repositories/marketplace_packages.py +486 -0
- cmem_client/repositories/projects.py +214 -0
- cmem_client/repositories/protocols/__init__.py +15 -0
- cmem_client/repositories/protocols/create_item.py +125 -0
- cmem_client/repositories/protocols/delete_item.py +95 -0
- cmem_client/repositories/protocols/export_item.py +114 -0
- cmem_client/repositories/protocols/import_item.py +141 -0
- cmem_client/repositories/python_packages.py +58 -0
- cmem_client/repositories/workflows.py +143 -0
- cmem_client-0.5.0.dist-info/METADATA +64 -0
- cmem_client-0.5.0.dist-info/RECORD +52 -0
- cmem_client-0.5.0.dist-info/WHEEL +4 -0
- cmem_client-0.5.0.dist-info/licenses/LICENSE +201 -0
|
@@ -0,0 +1,316 @@
|
|
|
1
|
+
"""Corporate Memory DataPlatform (explore) graph store management.
|
|
2
|
+
|
|
3
|
+
This module provides the GraphStore component for managing Corporate Memory's
|
|
4
|
+
DataPlatform graph store. The graph store is the primary repository for RDF
|
|
5
|
+
data and knowledge graphs, supporting semantic queries and exploration.
|
|
6
|
+
|
|
7
|
+
The GraphStore component provides high-level administrative operations including
|
|
8
|
+
bootstrap data management, full store backup and restoration, and system
|
|
9
|
+
information retrieval. These operations are essential for store maintenance,
|
|
10
|
+
deployment, and operational monitoring.
|
|
11
|
+
"""
|
|
12
|
+
|
|
13
|
+
from __future__ import annotations
|
|
14
|
+
|
|
15
|
+
import logging
|
|
16
|
+
from typing import TYPE_CHECKING
|
|
17
|
+
|
|
18
|
+
from cmem_client.logging_utils import log_method
|
|
19
|
+
from cmem_client.models.base import Model
|
|
20
|
+
|
|
21
|
+
if TYPE_CHECKING:
|
|
22
|
+
from pathlib import Path
|
|
23
|
+
|
|
24
|
+
from httpx import Response
|
|
25
|
+
|
|
26
|
+
from cmem_client.client import Client
|
|
27
|
+
|
|
28
|
+
from cmem_client.components.sparql_wrapper import SPARQLWrapper
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
class StoreInformation(Model):
    """Information about the graph store instance and its capabilities.

    This model represents metadata about the DataPlatform graph store,
    including the store type and version information. This information
    is useful for compatibility checks, monitoring, and debugging.

    Populated from the ``store`` section of the DataPlatform
    ``/actuator/info`` endpoint (see ``GraphStore.self_information``).

    Attributes:
        type: The type of graph store (e.g., "GRAPHDB", "TENTRIS").
        version: The version string of the graph store implementation.
    """

    type: str
    """The type/implementation of the graph store (e.g., "GRAPHDB", "TENTRIS")."""

    version: str
    """The version string of the graph store implementation."""
|
49
|
+
|
|
50
|
+
class GraphStore:
    """High-level interface for Corporate Memory DataPlatform graph store operations.

    The GraphStore component provides administrative and operational methods for
    managing the Corporate Memory DataPlatform graph store: bootstrap data
    management, full backup and restoration, and system information retrieval.

    It abstracts the DataPlatform API for store-level maintenance tasks. For
    individual graph manipulation, use the repositories instead.

    Attributes:
        _client: The Corporate Memory client instance used for API communication.
        _sparql_wrapper: SPARQLWrapper instance for rdflib SPARQL queries
            (created lazily via the ``sparql`` property).

    See Also:
        For individual graph operations, use the repositories.graphs module
        which provides CRUD operations for specific RDF graphs.
    """

    _client: Client
    """The Corporate Memory client instance used for making API requests to the DataPlatform."""

    _sparql_wrapper: SPARQLWrapper
    """SPARQLWrapper instance for executing SPARQL queries with rdflib."""

    def __init__(self, client: Client) -> None:
        """Initialize a new GraphStore component instance.

        Args:
            client: A configured Corporate Memory client instance with
                authentication and endpoint configuration.

        Note:
            This constructor is typically called automatically by the
            Client class when accessing the store property. Direct
            instantiation is rarely needed in normal usage.
        """
        self._client = client
        self.logger = logging.getLogger(f"{self._client.logger.name}.{self.__class__.__name__}")

    @log_method
    def import_bootstrap_data(self) -> None:
        """Import or update bootstrap data in the graph store.

        Bootstrap data includes system vocabularies, ontologies, and other
        foundational RDF data required for proper Corporate Memory operation.

        Raises:
            HTTPError: If the bootstrap import request fails due to network
                issues or server errors.

        Note:
            This operation may take some time. It is typically performed during
            system initialization or when updating to new Corporate Memory
            versions that include new system vocabularies.
        """
        path = "/api/admin/bootstrap"
        url = self._client.config.url_explore_api / path
        self._client.http.post(url=url, data={})

    @log_method
    def delete_bootstrap_data(self) -> None:
        """Delete bootstrap data from the graph store.

        Warning: This operation removes system vocabularies and foundational
        RDF data required for proper Corporate Memory operation. Use with
        extreme caution — the system may not function correctly until new
        bootstrap data is imported via import_bootstrap_data().

        Raises:
            HTTPError: If the bootstrap deletion request fails due to network
                issues or server errors.
        """
        # Decorated with @log_method for consistency with the other public
        # operations of this component; the explicit info message is kept
        # because this is a destructive action worth an unconditional record.
        self.logger.info("Deleting bootstrap data from the graph store...")
        path = "/api/admin/bootstrap"
        url = self._client.config.url_explore_api / path
        self._client.http.delete(url=url)

    @log_method
    def export_to_zip(self, path: Path) -> None:
        """Export a complete backup of the graph store as a ZIP archive.

        Creates a full backup of the entire DataPlatform graph store and
        streams it directly to the given file path as a compressed ZIP
        archive, keeping memory usage low even for large stores.

        Args:
            path: The file system path where the ZIP backup archive will be
                saved. The parent directory must exist and be writable.

        Raises:
            HTTPError: If the backup request fails due to network issues,
                server errors, or insufficient permissions.
            OSError: If the specified path cannot be written to due to file
                system permissions or disk space issues.

        Note:
            The operation blocks until the entire backup is complete. Backup
            files contain all graph data and should be stored securely.

        See Also:
            Use import_from_zip() to restore from archives created here.
        """
        response: Response
        url = self._client.config.url_explore_api / "/api/admin/backup/zip"
        with self._client.http.stream("GET", url=url) as response, path.open("wb") as file:
            # Fail before writing a partial/garbage file if the server
            # answered with an error status (matches the documented HTTPError).
            response.raise_for_status()
            for data in response.iter_bytes():
                file.write(data)

    @log_method
    def import_from_zip(self, path: Path) -> None:
        """Import and restore a complete graph store backup from a ZIP archive.

        Warning: This operation replaces ALL existing data in the graph store.
        All current graphs, vocabularies, and metadata will be permanently
        deleted and replaced with the contents of the backup archive. It
        cannot be undone without another backup.

        Args:
            path: The file system path to the ZIP backup archive to import.
                The file must be a valid archive created by export_to_zip()
                or compatible with the Corporate Memory backup format.

        Raises:
            HTTPError: If the restore request fails due to network issues,
                server errors, insufficient permissions, or invalid format.
            OSError: If the backup file cannot be read or does not exist.

        Note:
            The store may be unavailable during restoration, and large
            archives may take significant time to restore.

        See Also:
            Use export_to_zip() to create archives for import here.
        """
        url = self._client.config.url_explore_api / "/api/admin/restore/zip"
        # Open the archive in a context manager so the file handle is closed
        # even if the upload fails (the original left it dangling).
        with path.open("rb") as backup_file:
            files = {"file": ("backup.zip", backup_file, "application/zip")}
            response = self._client.http.post(url=url, files=files)
        response.raise_for_status()

    @property
    def self_information(self) -> StoreInformation:
        """Get metadata and version information about the graph store instance.

        The information is fetched live from the store's ``/actuator/info``
        endpoint on every access; consider caching the result if accessed
        frequently.

        Returns:
            StoreInformation: A model containing the store implementation
                name (e.g., "GRAPHDB", "TENTRIS") and its version string.

        Raises:
            HTTPError: If the information request fails due to network issues,
                server errors, or insufficient permissions.
            ValidationError: If the response cannot be parsed as valid store
                information due to unexpected response format.
        """
        url = self._client.config.url_explore_api / "/actuator/info"
        # NOTE(review): .get("store") yields None if the key is absent, which
        # would raise a TypeError below — assumed the endpoint always provides
        # a "store" section; confirm against the DataPlatform API.
        content = self._client.http.get(url=url).json().get("store")
        return StoreInformation(**content)

    @property
    def sparql(self) -> SPARQLWrapper:
        """Get a SPARQLWrapper instance for rdflib-based SPARQL queries.

        Returns a SPARQLWrapper component configured with authentication for
        executing SPARQL queries against the Corporate Memory SPARQL endpoint.

        Returns:
            The SPARQLWrapper component instance, created lazily on first access.

        Examples:
            >>> client = Client.from_env()
            >>> sparql_wrapper = client.store.sparql
            >>> # Use with rdflib operations
        """
        try:
            return self._sparql_wrapper
        except AttributeError:
            # Lazy initialization: build the wrapper on first access only.
            sparql_endpoint = str(self._client.config.url_explore_api / "/proxy/default/sparql")
            update_endpoint = str(self._client.config.url_explore_api / "/proxy/default/update")
            self._sparql_wrapper = SPARQLWrapper(
                sparql_endpoint=sparql_endpoint, update_endpoint=update_endpoint, client=self._client
            )
            return self._sparql_wrapper
|
@@ -0,0 +1,179 @@
|
|
|
1
|
+
"""eccenca Marketplace server integration.
|
|
2
|
+
|
|
3
|
+
This module provides the Marketplace component for interacting with the eccenca
|
|
4
|
+
Marketplace server. The component handles package downloads and version queries,
|
|
5
|
+
abstracting the marketplace REST API into a convenient Python interface.
|
|
6
|
+
|
|
7
|
+
The eccenca Marketplace is a central repository for distributing Corporate Memory
|
|
8
|
+
packages, including vocabularies, ontologies, and Python plugins. This component
|
|
9
|
+
enables automated package retrieval for installation and dependency resolution.
|
|
10
|
+
"""
|
|
11
|
+
|
|
12
|
+
from __future__ import annotations
|
|
13
|
+
|
|
14
|
+
import logging
|
|
15
|
+
from typing import TYPE_CHECKING
|
|
16
|
+
|
|
17
|
+
from httpx import HTTPError
|
|
18
|
+
from xdg_base_dirs import xdg_cache_home
|
|
19
|
+
|
|
20
|
+
from cmem_client.exceptions import MarketplaceReadError
|
|
21
|
+
from cmem_client.logging_utils import log_method
|
|
22
|
+
from cmem_client.models.url import HttpUrl
|
|
23
|
+
|
|
24
|
+
if TYPE_CHECKING:
|
|
25
|
+
from pathlib import Path
|
|
26
|
+
|
|
27
|
+
from eccenca_marketplace_client.fields import PackageIdentifier, PackageVersionIdentifier
|
|
28
|
+
|
|
29
|
+
from cmem_client.client import Client
|
|
30
|
+
|
|
31
|
+
MARKETPLACE_CACHE_DIR = xdg_cache_home() / "eccenca-marketplace"
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
class Marketplace:
    """Interface for eccenca Marketplace server operations.

    The Marketplace component provides methods for downloading packages from
    the eccenca Marketplace server. It handles version resolution, package
    retrieval, and writing downloaded content to the filesystem.

    Attributes:
        _client: The Corporate Memory client instance used for HTTP communication.
        _marketplace_url: Default marketplace server URL for package operations.
        _cache_dir: Directory for cached downloads, or None if caching is disabled.
    """

    _client: Client
    """The Corporate Memory client instance."""

    _marketplace_url: HttpUrl
    """The Marketplace server URL for package operations."""

    _cache_dir: Path | None
    """Directory used for cached downloads (None disables caching)."""

    def __init__(
        self,
        client: Client,
        marketplace_url: HttpUrl | str = "https://marketplace.eccenca.dev/",
        cache_dir: Path | None = MARKETPLACE_CACHE_DIR,
    ) -> None:
        """Initialize the Marketplace component.

        Args:
            client: The Corporate Memory client instance.
            marketplace_url: Default marketplace server URL. Defaults to the
                public eccenca Marketplace.
            cache_dir: Directory to use for cached downloads. If set to None,
                caching is disabled.
        """
        self._client = client
        self.marketplace_url = marketplace_url
        self.cache_dir = cache_dir

        if self.cache_dir is not None:
            self.cache_dir.mkdir(parents=True, exist_ok=True)

        self.logger = logging.getLogger(f"{self._client.logger.name}.{self.__class__.__name__}")

    @log_method
    def download_package(
        self,
        package_id: PackageIdentifier,
        path: Path | None = None,
        package_version: PackageVersionIdentifier | None = None,
        use_cache: bool = True,
    ) -> Path:
        """Download a package from the marketplace server to a specified directory.

        Downloads the requested package version, or — when no version is given —
        queries the marketplace for available versions and downloads the latest
        one. The package is saved as ``{package_id}-v{version}.cpa``. A cached
        copy is reused when ``use_cache`` is True.

        Args:
            package_id: Marketplace package identifier (e.g., "semanticarts-gist-vocab").
            path: Target directory where the package will be saved. If None,
                uses the cache directory. Must be a directory, not a file path.
            package_version: Specific version to download. If None, downloads
                the latest version.
            use_cache: If True, use cached version if available instead of
                downloading.

        Returns:
            The full file path where the package was saved (e.g.,
            /path/to/cache/semanticarts-gist-vocab-v13.0.0.cpa).

        Raises:
            MarketplaceReadError: If no target directory is available, the
                marketplace server request fails, or the package/version is
                not found.
        """
        if path is None:
            if self.cache_dir is None:
                raise MarketplaceReadError("Cannot download without a directory defined.")
            path = self.cache_dir

        if not path.is_dir():
            path.mkdir(parents=True, exist_ok=True)

        if package_version is None:
            # Only hit the version-listing endpoint when we actually need to
            # resolve "latest" (avoids a wasted round-trip for pinned versions).
            available_versions = self.get_versions_from_package(package_id=package_id)
            # get_versions_from_package() sorts ascending, so the latest
            # version is the LAST element (the original took [0] = oldest).
            # NOTE(review): the sort is lexicographic — confirm version
            # identifiers sort correctly as strings (e.g. "13.0.0" vs "2.0.0").
            version = str(available_versions[-1])
        else:
            version = str(package_version)

        filename = f"{package_id}-v{version}.cpa"
        file_path = path / filename

        if file_path.exists() and use_cache:
            return file_path

        url = self.marketplace_url / "api/packages" / package_id / "versions" / version

        try:
            download_response = self._client.http.get(url=url)
            download_response.raise_for_status()
        except HTTPError as e:
            raise MarketplaceReadError(f"Error on downloading package '{package_id}'.") from e

        file_path.write_bytes(download_response.content)
        return file_path

    @log_method
    def get_versions_from_package(
        self,
        package_id: PackageIdentifier,
    ) -> list[PackageVersionIdentifier]:
        """Get the available versions of a package from the marketplace server.

        Args:
            package_id: Marketplace package identifier.

        Returns:
            List of package versions available, sorted in ascending order.

        Raises:
            MarketplaceReadError: If the marketplace server request fails.
        """
        url = self.marketplace_url / "api/packages" / package_id / "versions"
        try:
            versions_response = self._client.http.get(url=url)
            versions_response.raise_for_status()
            versions = versions_response.json()
        except HTTPError as e:
            raise MarketplaceReadError(f"Error on retrieving package versions for '{package_id}'.") from e

        return sorted([version.get("package_version") for version in versions])

    @property
    def marketplace_url(self) -> HttpUrl:
        """Get the marketplace server URL."""
        return self._marketplace_url

    @marketplace_url.setter
    def marketplace_url(self, marketplace_url: HttpUrl | str) -> None:
        """Set the marketplace server URL (strings are coerced to HttpUrl)."""
        self._marketplace_url = marketplace_url if isinstance(marketplace_url, HttpUrl) else HttpUrl(marketplace_url)

    @property
    def cache_dir(self) -> Path | None:
        """Get the cache directory."""
        return self._cache_dir

    @cache_dir.setter
    def cache_dir(self, cache_dir: Path | None) -> None:
        """Set the cache directory."""
        self._cache_dir = cache_dir
|
@@ -0,0 +1,53 @@
|
|
|
1
|
+
"""SPARQL Wrapper for eccenca Corporate Memory"""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import logging
|
|
6
|
+
from typing import TYPE_CHECKING
|
|
7
|
+
|
|
8
|
+
from rdflib.plugins.stores.sparqlconnector import SPARQLConnector
|
|
9
|
+
|
|
10
|
+
from cmem_client.logging_utils import log_method
|
|
11
|
+
|
|
12
|
+
if TYPE_CHECKING:
|
|
13
|
+
from rdflib.query import Result
|
|
14
|
+
|
|
15
|
+
from cmem_client.client import Client
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
class SPARQLWrapper(SPARQLConnector):
    """SPARQL connector bound to a Corporate Memory client.

    Thin wrapper around rdflib's SPARQLConnector that injects a Bearer token
    obtained from the client's auth provider and adds method-level logging.
    """

    def __init__(self, sparql_endpoint: str, update_endpoint: str, client: Client) -> None:
        """Set up the connector with endpoints and client authentication.

        Args:
            sparql_endpoint: URL of the SPARQL query endpoint.
            update_endpoint: URL of the SPARQL update endpoint.
            client: The Corporate Memory client providing auth and logging.
        """
        self._client = client

        # Fetch a token once at construction time and attach it as a header.
        bearer = f"Bearer {client.auth.get_access_token()}"

        super().__init__(
            query_endpoint=sparql_endpoint,
            update_endpoint=update_endpoint,
            headers={"Authorization": bearer},
            method="POST",
        )
        logger_name = f"{self._client.logger.name}.{self.__class__.__name__}"
        self.logger = logging.getLogger(logger_name)

    @log_method
    def query(
        self,
        query: str,
        default_graph: str | None = None,
        named_graph: str | None = None,
    ) -> Result:
        """Query a SPARQL endpoint. This method overwrites the original for logging."""
        result = super().query(query, default_graph=default_graph, named_graph=named_graph)
        return result

    @log_method
    def update(
        self,
        query: str,
        default_graph: str | None = None,
        named_graph: str | None = None,
    ) -> None:
        """Perform update SPARQL query. This method overwrites the original for logging."""
        return super().update(query, default_graph=default_graph, named_graph=named_graph)