cmem-client 0.5.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- cmem_client/__init__.py +13 -0
- cmem_client/auth_provider/__init__.py +14 -0
- cmem_client/auth_provider/abc.py +124 -0
- cmem_client/auth_provider/client_credentials.py +207 -0
- cmem_client/auth_provider/password.py +252 -0
- cmem_client/auth_provider/prefetched_token.py +153 -0
- cmem_client/client.py +485 -0
- cmem_client/components/__init__.py +10 -0
- cmem_client/components/graph_store.py +316 -0
- cmem_client/components/marketplace.py +179 -0
- cmem_client/components/sparql_wrapper.py +53 -0
- cmem_client/components/workspace.py +194 -0
- cmem_client/config.py +364 -0
- cmem_client/exceptions.py +82 -0
- cmem_client/logging_utils.py +49 -0
- cmem_client/models/__init__.py +16 -0
- cmem_client/models/access_condition.py +147 -0
- cmem_client/models/base.py +30 -0
- cmem_client/models/dataset.py +32 -0
- cmem_client/models/error.py +67 -0
- cmem_client/models/graph.py +26 -0
- cmem_client/models/item.py +143 -0
- cmem_client/models/logging_config.py +51 -0
- cmem_client/models/package.py +35 -0
- cmem_client/models/project.py +46 -0
- cmem_client/models/python_package.py +26 -0
- cmem_client/models/token.py +40 -0
- cmem_client/models/url.py +34 -0
- cmem_client/models/workflow.py +80 -0
- cmem_client/repositories/__init__.py +15 -0
- cmem_client/repositories/access_conditions.py +62 -0
- cmem_client/repositories/base/__init__.py +12 -0
- cmem_client/repositories/base/abc.py +138 -0
- cmem_client/repositories/base/paged_list.py +63 -0
- cmem_client/repositories/base/plain_list.py +39 -0
- cmem_client/repositories/base/task_search.py +70 -0
- cmem_client/repositories/datasets.py +36 -0
- cmem_client/repositories/graph_imports.py +93 -0
- cmem_client/repositories/graphs.py +458 -0
- cmem_client/repositories/marketplace_packages.py +486 -0
- cmem_client/repositories/projects.py +214 -0
- cmem_client/repositories/protocols/__init__.py +15 -0
- cmem_client/repositories/protocols/create_item.py +125 -0
- cmem_client/repositories/protocols/delete_item.py +95 -0
- cmem_client/repositories/protocols/export_item.py +114 -0
- cmem_client/repositories/protocols/import_item.py +141 -0
- cmem_client/repositories/python_packages.py +58 -0
- cmem_client/repositories/workflows.py +143 -0
- cmem_client-0.5.0.dist-info/METADATA +64 -0
- cmem_client-0.5.0.dist-info/RECORD +52 -0
- cmem_client-0.5.0.dist-info/WHEEL +4 -0
- cmem_client-0.5.0.dist-info/licenses/LICENSE +201 -0
|
@@ -0,0 +1,194 @@
|
|
|
1
|
+
"""Corporate Memory DataIntegration (build) workspace management.
|
|
2
|
+
|
|
3
|
+
This module provides the BuildWorkspace component for managing Corporate Memory's
|
|
4
|
+
DataIntegration workspace. The workspace contains projects, datasets, transformations,
|
|
5
|
+
and other integration artifacts organized in a hierarchical structure.
|
|
6
|
+
|
|
7
|
+
The BuildWorkspace component provides high-level operations for workspace backup
|
|
8
|
+
and restoration, allowing entire workspace snapshots to be exported and imported
|
|
9
|
+
as ZIP archives. This is essential for deployment, migration, and disaster recovery
|
|
10
|
+
scenarios.
|
|
11
|
+
"""
|
|
12
|
+
|
|
13
|
+
from __future__ import annotations
|
|
14
|
+
|
|
15
|
+
import logging
|
|
16
|
+
from typing import TYPE_CHECKING
|
|
17
|
+
|
|
18
|
+
from cmem_client.logging_utils import log_method
|
|
19
|
+
|
|
20
|
+
if TYPE_CHECKING:
|
|
21
|
+
from pathlib import Path
|
|
22
|
+
|
|
23
|
+
from httpx import Response
|
|
24
|
+
|
|
25
|
+
from cmem_client.client import Client
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
class BuildWorkspace:
    """High-level interface for Corporate Memory DataIntegration workspace operations.

    Provides administrative, workspace-level operations for the Corporate Memory
    DataIntegration (build) workspace: complete backup (export) and restoration
    (import) of all workspace contents as ZIP archives. The workspace contains
    all DataIntegration artifacts, including:

    - Projects and their configurations
    - Datasets and data sources
    - Transformation workflows and mapping rules
    - Workflow definitions and scheduling configurations

    Typical use cases are environment synchronization/migration, disaster
    recovery, and deployment automation (CI/CD).

    Attributes:
        _client: The Corporate Memory client instance used for API communication.

    See Also:
        For individual project operations, use the repositories.projects module
        which provides CRUD operations for specific DataIntegration projects.
    """

    _client: Client
    """The Corporate Memory client instance used for making API requests to the DataIntegration API."""

    def __init__(self, client: Client) -> None:
        """Initialize a new BuildWorkspace component instance.

        Args:
            client: A configured Corporate Memory client instance with
                authentication and endpoint configuration.

        Note:
            This constructor is typically called automatically by the
            Client class when accessing the workspace property. Direct
            instantiation is rarely needed in normal usage.
        """
        self._client = client
        # Child logger of the client's logger, so workspace log records are
        # grouped under the client's logging namespace.
        self.logger = logging.getLogger(f"{self._client.logger.name}.{self.__class__.__name__}")

    @log_method
    def import_from_zip(self, path: Path) -> Response:
        """Import and restore a complete workspace backup from a ZIP archive.

        Warning: This operation overwrites existing workspace content.
        All projects, datasets, transformations, and other workspace artifacts
        will be replaced or removed during the import process.

        Restores a Corporate Memory DataIntegration workspace from a ZIP backup
        archive created by export_to_zip(). The import loads all workspace
        artifacts from the archive into the current workspace.

        Args:
            path: The file system path to the ZIP backup archive to import.
                The file must be a valid workspace backup archive created by
                export_to_zip() or compatible with the DataIntegration
                workspace format.

        Returns:
            Response: The HTTP response object from the import operation.
                Check response.json() for detailed import results and any
                warnings or errors.

        Raises:
            HTTPError: If the import request fails due to network issues,
                server errors, insufficient permissions, or an invalid
                archive format.
            OSError: If the specified backup file cannot be read due to file
                system permissions or if the file does not exist.

        Note:
            Large workspace archives may take significant time to import, and
            the workspace may be partially unavailable during the import.

        See Also:
            Use export_to_zip() to create workspace archives for this method.
        """
        url = self._client.config.url_build_api / "/workspace/import/xmlZip"
        # Open the archive via a context manager so the file handle is always
        # released, even when the upload fails (previously the handle leaked).
        with path.open("rb") as archive:
            files = {"file": (path.name, archive, "application/octet-stream")}
            response = self._client.http.post(url=url, files=files)
        response.raise_for_status()
        return response

    @log_method
    def export_to_zip(self, path: Path) -> None:
        """Export a complete backup of the workspace as a ZIP archive.

        Creates a point-in-time snapshot of the entire Corporate Memory
        DataIntegration workspace (projects, datasets, transformations,
        vocabularies, workflows, and configurations). The backup is streamed
        directly to the given file path as a compressed ZIP archive, keeping
        memory usage low even for large workspaces.

        Args:
            path: The file system path where the ZIP workspace archive will be
                saved. The path should include the .zip extension and the
                parent directory must exist and be writable.

        Raises:
            HTTPError: If the export request fails due to network issues,
                server errors, or insufficient permissions.
            OSError: If the specified path cannot be written to due to file
                system permissions or disk space issues.

        Note:
            Workspace archives contain all project data and configurations and
            may include connection strings and access credentials — store them
            securely and review contents before sharing.

        See Also:
            Use import_from_zip() to restore archives created by this method.
        """
        url = self._client.config.url_build_api / "/workspace/export/xmlZip"
        with self._client.http.stream(method="GET", url=url) as response:
            # Fail fast on HTTP errors; do not create/truncate the target file
            # and silently fill it with an error payload.
            response.raise_for_status()
            with path.open("wb") as download_file:
                for chunk in response.iter_bytes():
                    download_file.write(chunk)
|
cmem_client/config.py
ADDED
|
@@ -0,0 +1,364 @@
|
|
|
1
|
+
"""Configuration management for the Corporate Memory client.
|
|
2
|
+
|
|
3
|
+
This module provides the Config class that handles all configuration aspects
|
|
4
|
+
of the Corporate Memory client, including URL construction, SSL verification,
|
|
5
|
+
authentication endpoints, and environment variable parsing.
|
|
6
|
+
|
|
7
|
+
The Config class automatically constructs various API endpoints based on a base URL
|
|
8
|
+
and provides flexible configuration through both programmatic setup and environment
|
|
9
|
+
variables, making it suitable for different deployment environments.
|
|
10
|
+
"""
|
|
11
|
+
|
|
12
|
+
from os import getenv
|
|
13
|
+
|
|
14
|
+
from cmem_client.exceptions import ClientEnvConfigError
|
|
15
|
+
from cmem_client.models.url import HttpUrl
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
class Config:
    """Corporate Memory Client configuration.

    Manages all configuration aspects for connecting to Corporate Memory
    instances: URL construction, SSL verification, timeout settings, and
    authentication endpoints. API endpoints are derived from the base URL and
    realm by default, and each can be overridden individually for complex
    deployment scenarios. Factory methods support configuration from
    environment variables (from_env) and from a cmempy setup (from_cmempy).

    Attributes:
        _realm_id: The Keycloak realm identifier for authentication.
        _verify: SSL/TLS certificate verification flag.
        _url_base: Base URL of the Corporate Memory instance.
        _url_keycloak: Base URL of the Keycloak authentication server.
        _url_keycloak_issuer: Keycloak realm issuer URL for token validation.
        _url_build_api: DataIntegration (build) API endpoint URL.
        _url_explore_api: DataPlatform (explore) API endpoint URL.
        _url_oauth_token: OAuth token endpoint URL for authentication.
        timeout: HTTP request timeout in seconds.
    """

    _realm_id: str = "cmem"
    """Keycloak realm identifier, defaults to 'cmem' for standard deployments."""

    _verify: bool = True
    """SSL/TLS certificate verification flag, defaults to True for security."""

    _url_base: "HttpUrl"
    """Base URL of the Corporate Memory instance, used to construct other endpoints."""

    _url_keycloak: "HttpUrl"
    """Base URL of the Keycloak authentication server, derived from base URL if not set."""

    _url_keycloak_issuer: "HttpUrl"
    """Keycloak realm issuer URL, constructed from Keycloak URL and realm ID."""

    _url_build_api: "HttpUrl"
    """DataIntegration (build) API endpoint URL, derived from base URL if not set."""

    _url_explore_api: "HttpUrl"
    """DataPlatform (explore) API endpoint URL, derived from base URL if not set."""

    _url_oauth_token: "HttpUrl"
    """OAuth token endpoint URL, constructed from Keycloak issuer URL."""

    timeout: int = 10
    """HTTP request timeout in seconds, defaults to 10 seconds."""

    def __init__(self, url_base: "HttpUrl | str", realm_id: str = "cmem") -> None:
        """Initialize a new Config instance.

        Args:
            url_base: The base URL of the Corporate Memory instance. Can be
                provided as either an HttpUrl object or a string that will
                be converted to HttpUrl.
            realm_id: The Keycloak realm identifier for authentication.
                Defaults to "cmem" for standard Corporate Memory deployments.
        """
        self.url_base = HttpUrl(url_base) if isinstance(url_base, str) else url_base
        self.realm_id = realm_id

    @staticmethod
    def _parse_verify_flag(value: "str | bool") -> bool:
        """Interpret an SSL verification setting.

        Booleans pass through unchanged. Strings such as "false", "0", "no"
        or "off" (case-insensitive, surrounding whitespace ignored) disable
        verification; any other string enables it.

        Note:
            A plain bool(value) must not be used here: bool("false") is True,
            which would make it impossible to disable verification via an
            environment variable.
        """
        if isinstance(value, bool):
            return value
        return value.strip().lower() not in {"false", "0", "no", "off"}

    @classmethod
    def _apply_overrides(
        cls,
        config: "Config",
        *,
        ssl_verify: "str | bool | None" = None,
        keycloak_realm_id: "str | None" = None,
        keycloak_base_uri: "str | None" = None,
        oauth_token_uri: "str | None" = None,
        di_api_endpoint: "str | None" = None,
        dp_api_endpoint: "str | None" = None,
    ) -> None:
        """Apply optional endpoint and SSL overrides to a config.

        Shared by the from_env and from_cmempy factories; unset (None or
        empty) values leave the corresponding default untouched.
        """
        # ssl_verify may legitimately be the boolean False, so we cannot use
        # plain truthiness to decide whether an override was supplied.
        if ssl_verify is not None and ssl_verify != "":
            config.verify = cls._parse_verify_flag(ssl_verify)
        if keycloak_realm_id:
            config.realm_id = keycloak_realm_id
        if keycloak_base_uri:
            config.url_keycloak = HttpUrl(keycloak_base_uri)
        if oauth_token_uri:
            config.url_oauth_token = HttpUrl(oauth_token_uri)
        if di_api_endpoint:
            config.url_build_api = HttpUrl(di_api_endpoint)
        if dp_api_endpoint:
            config.url_explore_api = HttpUrl(dp_api_endpoint)

    @classmethod
    def from_env(cls) -> "Config":
        """Create a Config instance from environment variables.

        Convenient for containerized or cloud environments where configuration
        is managed through environment variables.

        Returns:
            A Config instance configured with values from environment variables.

        Raises:
            ClientEnvConfigError: If the required CMEM_BASE_URI environment
                variable is not set.

        Environment Variables:
            CMEM_BASE_URI (required): Base URL of the Corporate Memory instance.
            DI_API_ENDPOINT (optional): DataIntegration API endpoint override.
            DP_API_ENDPOINT (optional): DataPlatform API endpoint override.
            KEYCLOAK_BASE_URI (optional): Keycloak server URL override.
            KEYCLOAK_REALM_ID (optional): Keycloak realm identifier override.
            OAUTH_TOKEN_URI (optional): OAuth token endpoint override.
            SSL_VERIFY (optional): SSL certificate verification flag
                ("false"/"0"/"no"/"off" disable verification).
        """
        # NOTE(review): REQUESTS_CA_BUNDLE is not evaluated yet.
        cmem_base_uri = getenv("CMEM_BASE_URI")
        if not cmem_base_uri:
            raise ClientEnvConfigError("CMEM_BASE_URI environment variable not set.")
        config = cls(url_base=cmem_base_uri)
        cls._apply_overrides(
            config,
            ssl_verify=getenv("SSL_VERIFY"),
            keycloak_realm_id=getenv("KEYCLOAK_REALM_ID"),
            keycloak_base_uri=getenv("KEYCLOAK_BASE_URI"),
            oauth_token_uri=getenv("OAUTH_TOKEN_URI"),
            di_api_endpoint=getenv("DI_API_ENDPOINT"),
            dp_api_endpoint=getenv("DP_API_ENDPOINT"),
        )
        return config

    @classmethod
    def from_cmempy(cls) -> "Config":
        """Create a Config instance from a cmempy environment.

        Returns:
            A Config instance populated from cmempy's configuration accessors.

        Raises:
            OSError: If cmempy is not installed.
            ClientEnvConfigError: If cmempy does not provide a base URI.
        """
        try:
            import cmem.cmempy.config as cmempy_config  # noqa: PLC0415
        except ImportError as error:
            raise OSError("cmempy is not installed.") from error
        # NOTE(review): REQUESTS_CA_BUNDLE is not evaluated yet.
        cmem_base_uri = cmempy_config.get_cmem_base_uri()
        if not cmem_base_uri:
            raise ClientEnvConfigError("CMEM_BASE_URI environment variable not set.")
        config = cls(url_base=cmem_base_uri)
        cls._apply_overrides(
            config,
            ssl_verify=cmempy_config.get_ssl_verify(),
            keycloak_realm_id=cmempy_config.get_keycloak_realm_id(),
            keycloak_base_uri=cmempy_config.get_keycloak_base_uri(),
            oauth_token_uri=cmempy_config.get_oauth_token_uri(),
            di_api_endpoint=cmempy_config.get_di_api_endpoint(),
            dp_api_endpoint=cmempy_config.get_dp_api_endpoint(),
        )
        return config

    @property
    def verify(self) -> bool:
        """Get the SSL/TLS certificate verification flag.

        Returns:
            True if SSL/TLS certificates should be verified, False otherwise.
            Defaults to True for security reasons.
        """
        return self._verify

    @verify.setter
    def verify(self, value: bool) -> None:
        """Set the SSL/TLS certificate verification flag.

        Warning:
            Disabling SSL verification reduces security and should only be
            done in development environments.
        """
        self._verify = value

    @property
    def url_base(self) -> "HttpUrl":
        """Get the base URL of the Corporate Memory instance.

        Returns:
            The base URL from which all other API endpoints are derived.
        """
        return self._url_base

    @url_base.setter
    def url_base(self, value: "HttpUrl") -> None:
        """Set the base URL of the Corporate Memory instance.

        Note:
            Changing the base URL affects the construction of all derived
            endpoints unless they have been explicitly overridden.
        """
        self._url_base = value

    @property
    def url_explore_api(self) -> "HttpUrl":
        """Get the DataPlatform (explore) API endpoint URL.

        Handles graph storage, SPARQL queries, and semantic data exploration.
        If not explicitly set, defaults to the base URL with '/dataplatform/'
        appended.
        """
        try:
            return self._url_explore_api
        except AttributeError:
            return self.url_base / "/dataplatform/"

    @url_explore_api.setter
    def url_explore_api(self, value: "HttpUrl") -> None:
        """Set the DataPlatform (explore) API endpoint URL.

        Overrides the default URL construction based on the base URL for
        deployments with a separately hosted DataPlatform service.
        """
        self._url_explore_api = value

    @property
    def url_build_api(self) -> "HttpUrl":
        """Get the DataIntegration (build) API endpoint URL.

        Handles projects, datasets, transformations, and data integration
        workflows. If not explicitly set, defaults to the base URL with
        '/dataintegration/' appended.
        """
        try:
            return self._url_build_api
        except AttributeError:
            return self.url_base / "/dataintegration/"

    @url_build_api.setter
    def url_build_api(self, value: "HttpUrl") -> None:
        """Set the DataIntegration (build) API endpoint URL.

        Overrides the default URL construction based on the base URL for
        deployments with a separately hosted DataIntegration service.
        """
        self._url_build_api = value

    @property
    def url_keycloak(self) -> "HttpUrl":
        """Get the Keycloak authentication server base URL.

        If not explicitly set, defaults to the base URL with '/auth/' appended.
        """
        try:
            return self._url_keycloak
        except AttributeError:
            return self.url_base / "/auth/"

    @url_keycloak.setter
    def url_keycloak(self, value: "HttpUrl") -> None:
        """Set the Keycloak authentication server base URL.

        Overrides the default construction for setups where Keycloak is
        deployed separately from the main Corporate Memory instance.
        """
        self._url_keycloak = value

    @property
    def url_keycloak_issuer(self) -> "HttpUrl":
        """Get the Keycloak realm issuer URL.

        Used for token validation and OpenID Connect flows; constructed from
        the Keycloak base URL and the realm identifier.

        Note:
            This property cannot be set directly. To customize it, set the
            url_keycloak property and realm_id attribute instead.
        """
        try:
            return self._url_keycloak_issuer
        except AttributeError:
            return self.url_keycloak / f"/realms/{self.realm_id}/"

    @property
    def url_oauth_token(self) -> "HttpUrl":
        """Get the OAuth 2.0 token endpoint URL.

        Used by authentication providers to obtain access tokens. If not
        explicitly set, defaults to the standard OpenID Connect token endpoint
        path within the Keycloak realm.
        """
        try:
            return self._url_oauth_token
        except AttributeError:
            return self.url_keycloak_issuer / "/protocol/openid-connect/token"

    @url_oauth_token.setter
    def url_oauth_token(self, value: "HttpUrl") -> None:
        """Set the OAuth 2.0 token endpoint URL.

        Overrides the default construction based on the Keycloak issuer URL
        for custom OAuth configurations or alternative token endpoints.
        """
        self._url_oauth_token = value
|
|
@@ -0,0 +1,82 @@
|
|
|
1
|
+
"""Custom exception classes for the cmem_client package.
|
|
2
|
+
|
|
3
|
+
This module defines all custom exceptions used throughout the cmem_client library,
|
|
4
|
+
providing specific error types for different failure scenarios such as authentication,
|
|
5
|
+
configuration, and repository operations.
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
class BaseError(Exception):
    """Base exception for all cmem_client exceptions."""


class ClientNoAuthProviderError(BaseError):
    """Exception raised when no auth provider is given but needed."""


class ClientEnvConfigError(BaseError):
    """Exception raised when an environment key is missing."""


class RepositoryItemNotFoundError(BaseError):
    """Exception raised when a specific item is missing in a repository."""


class RepositoryConfigError(BaseError):
    """Exception raised when a repository configuration is invalid."""


class RepositoryModificationError(BaseError):
    """Exception raised when a repository modification failed or is invalid."""


class RepositoryReadError(BaseError):
    """Exception raised when a repository read operation failed or is invalid."""


class MarketplaceReadError(BaseError):
    """Exception raised when a marketplace read operation failed or is invalid."""


class MarketplaceWriteError(BaseError):
    """Exception raised when a marketplace write operation failed or is invalid."""


class WorkflowReadError(BaseError):
    """Exception raised when a workflow read operation failed or is invalid."""


class WorkflowExecutionError(BaseError):
    """Exception raised when a workflow execution operation failed or is invalid."""


class GraphImportError(RepositoryModificationError):
    """Exception raised when a graph import operation fails."""


# Derives from RepositoryReadError for consistency with ProjectExportError
# (export operations are reads); still a BaseError subclass, so existing
# handlers keep working.
class GraphExportError(RepositoryReadError):
    """Exception raised when a graph export operation fails."""


class ProjectImportError(RepositoryModificationError):
    """Exception raised when a project import operation fails."""


class ProjectExportError(RepositoryReadError):
    """Exception raised when a project export operation fails."""


class PythonPackageImportError(RepositoryModificationError):
    """Exception raised when a Python package import fails."""


class MarketplacePackagesImportError(RepositoryModificationError):
    """Exception raised when a marketplace package installation fails."""


class MarketplacePackagesDeleteError(RepositoryModificationError):
    """Exception raised when a marketplace package deletion fails."""


# Derives from RepositoryReadError for consistency with the other export
# error types; still a BaseError subclass.
class MarketplacePackagesExportError(RepositoryReadError):
    """Exception raised when a marketplace packages export fails."""
|
|
@@ -0,0 +1,49 @@
|
|
|
1
|
+
"""Logging utilities.
|
|
2
|
+
|
|
3
|
+
Note: This module uses Any for kwargs to match the stdlib logging interface signature.
|
|
4
|
+
"""
|
|
5
|
+
# ruff: noqa: ANN401
|
|
6
|
+
|
|
7
|
+
import logging
|
|
8
|
+
from collections.abc import Callable
|
|
9
|
+
from functools import wraps
|
|
10
|
+
from typing import Any, ParamSpec, TypeVar
|
|
11
|
+
|
|
12
|
+
GenericParameter = ParamSpec("GenericParameter")
|
|
13
|
+
GenericResult = TypeVar("GenericResult")
|
|
14
|
+
|
|
15
|
+
TRACE_LEVEL = 5
|
|
16
|
+
logging.addLevelName(TRACE_LEVEL, "TRACE")
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
def _trace_method(self: logging.Logger, message: str, *args: object, **kwargs: Any) -> None:
|
|
20
|
+
"""Method to add to Logger instances for TRACE level."""
|
|
21
|
+
if self.isEnabledFor(TRACE_LEVEL):
|
|
22
|
+
self.log(TRACE_LEVEL, message, *args, **kwargs)
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
def install_trace_logger() -> None:
    """Install TRACE level logging dynamically."""

    def trace_root(message: str, *args: object, **kwargs: Any) -> None:
        logging.getLogger().log(TRACE_LEVEL, message, *args, **kwargs)

    # Give every Logger instance a .trace() method, and mirror the
    # module-level logging.debug()/logging.info() convenience functions
    # with a root-logger logging.trace().
    logging.Logger.trace = _trace_method  # type: ignore[attr-defined]
    logging.trace = trace_root  # type: ignore[attr-defined]
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
def log_method(method: Callable[GenericParameter, GenericResult], display_name: str | None = None): # noqa: ANN201
|
|
36
|
+
"""Wrapper to log entry and exit of methods using TRACE level.
|
|
37
|
+
|
|
38
|
+
Note: Don't use this on methods with sensitive information as they might get logged too
|
|
39
|
+
"""
|
|
40
|
+
display_name = display_name or method.__name__
|
|
41
|
+
|
|
42
|
+
@wraps(method)
|
|
43
|
+
def _wrapper(self: Any, *args: Any, **kwargs: Any): # noqa: ANN202
|
|
44
|
+
self.logger.trace(f"{display_name}() called with {args!r} and {kwargs!r}") # type: ignore[attr-defined]
|
|
45
|
+
result = method(self, *args, **kwargs)
|
|
46
|
+
self.logger.trace(f"{display_name}() returned {result!r}") # type: ignore[attr-defined]
|
|
47
|
+
return result
|
|
48
|
+
|
|
49
|
+
return _wrapper
|