chuk-artifacts 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- chuk_artifacts/__init__.py +149 -0
- chuk_artifacts/admin.py +79 -0
- chuk_artifacts/base.py +75 -0
- chuk_artifacts/batch.py +115 -0
- chuk_artifacts/config.py +338 -0
- chuk_artifacts/core.py +215 -0
- chuk_artifacts/exceptions.py +37 -0
- chuk_artifacts/metadata.py +286 -0
- chuk_artifacts/models.py +23 -0
- chuk_artifacts/presigned.py +267 -0
- chuk_artifacts/provider_factory.py +84 -0
- chuk_artifacts/providers/__init__.py +10 -0
- chuk_artifacts/providers/filesystem.py +453 -0
- chuk_artifacts/providers/ibm_cos.py +121 -0
- chuk_artifacts/providers/ibm_cos_iam.py +82 -0
- chuk_artifacts/providers/memory.py +315 -0
- chuk_artifacts/providers/s3.py +90 -0
- chuk_artifacts/store.py +383 -0
- chuk_artifacts-0.1.0.dist-info/METADATA +519 -0
- chuk_artifacts-0.1.0.dist-info/RECORD +23 -0
- chuk_artifacts-0.1.0.dist-info/WHEEL +5 -0
- chuk_artifacts-0.1.0.dist-info/licenses/LICENSE +21 -0
- chuk_artifacts-0.1.0.dist-info/top_level.txt +1 -0
chuk_artifacts/config.py
ADDED
@@ -0,0 +1,338 @@
|
|
1
|
+
# -*- coding: utf-8 -*-
|
2
|
+
# chuk_artifacts/config.py
|
3
|
+
"""
|
4
|
+
Configuration helpers for ArtifactStore.
|
5
|
+
|
6
|
+
Provides convenient functions to set up common configurations
|
7
|
+
without needing to write .env files or remember all the variable names.
|
8
|
+
"""
|
9
|
+
|
10
|
+
import os
|
11
|
+
from typing import Dict, Optional
|
12
|
+
from .store import ArtifactStore
|
13
|
+
|
14
|
+
|
15
|
+
def configure_memory() -> Dict[str, str]:
    """
    Configure the environment for fully in-memory storage.

    Intended for development and testing: both artifacts and session
    metadata live in process memory and vanish on exit.

    Returns
    -------
    dict
        The environment variables that were applied.
    """
    settings = {
        "ARTIFACT_PROVIDER": "memory",
        "SESSION_PROVIDER": "memory",
        "ARTIFACT_BUCKET": "mcp-artifacts"
    }
    # os.environ.update() assigns each key through the mapping, exactly
    # like the per-item assignment it replaces.
    os.environ.update(settings)
    return settings
|
34
|
+
|
35
|
+
|
36
|
+
def configure_filesystem(root: str = "./artifacts") -> Dict[str, str]:
    """
    Configure the environment for local filesystem artifact storage.

    Session metadata still uses the in-memory provider; only artifact
    bytes are persisted under *root*.

    Parameters
    ----------
    root : str
        Root directory that will hold stored artifacts.

    Returns
    -------
    dict
        The environment variables that were applied.
    """
    settings = {
        "ARTIFACT_PROVIDER": "filesystem",
        "SESSION_PROVIDER": "memory",
        "ARTIFACT_FS_ROOT": root,
        "ARTIFACT_BUCKET": "mcp-artifacts"
    }
    os.environ.update(settings)
    return settings
|
61
|
+
|
62
|
+
|
63
|
+
def configure_s3(
    *,
    access_key: str,
    secret_key: str,
    bucket: str,
    endpoint_url: Optional[str] = None,
    region: str = "us-east-1",
    session_provider: str = "memory"
) -> Dict[str, str]:
    """
    Configure the environment for S3-compatible artifact storage.

    Parameters
    ----------
    access_key : str
        AWS access key ID.
    secret_key : str
        AWS secret access key.
    bucket : str
        Target S3 bucket name.
    endpoint_url : str, optional
        Custom endpoint for S3-compatible services (MinIO,
        DigitalOcean Spaces, etc.). Omitted from the environment
        when not given.
    region : str
        AWS region.
    session_provider : str
        Session provider name ("memory" or "redis").

    Returns
    -------
    dict
        The environment variables that were applied.
    """
    settings = {
        "ARTIFACT_PROVIDER": "s3",
        "SESSION_PROVIDER": session_provider,
        "AWS_ACCESS_KEY_ID": access_key,
        "AWS_SECRET_ACCESS_KEY": secret_key,
        "AWS_REGION": region,
        "ARTIFACT_BUCKET": bucket
    }
    # Only set a custom endpoint when one was supplied (truthy check,
    # matching the original behavior: "" is treated like None).
    if endpoint_url:
        settings["S3_ENDPOINT_URL"] = endpoint_url
    os.environ.update(settings)
    return settings
|
111
|
+
|
112
|
+
|
113
|
+
def configure_redis_session(redis_url: str = "redis://localhost:6379/0") -> Dict[str, str]:
    """
    Configure the environment to keep session metadata in Redis.

    Only the session-provider variables are touched; artifact storage
    configuration is left as-is.

    Parameters
    ----------
    redis_url : str
        Redis connection URL.

    Returns
    -------
    dict
        The environment variables that were applied.
    """
    settings = {
        "SESSION_PROVIDER": "redis",
        "SESSION_REDIS_URL": redis_url
    }
    os.environ.update(settings)
    return settings
|
136
|
+
|
137
|
+
|
138
|
+
def configure_ibm_cos(
    *,
    access_key: str,
    secret_key: str,
    bucket: str,
    endpoint: str = "https://s3.us-south.cloud-object-storage.appdomain.cloud",
    region: str = "us-south",
    session_provider: str = "memory"
) -> Dict[str, str]:
    """
    Configure the environment for IBM Cloud Object Storage with HMAC keys.

    Parameters
    ----------
    access_key : str
        HMAC access key.
    secret_key : str
        HMAC secret key.
    bucket : str
        COS bucket name.
    endpoint : str
        IBM COS endpoint URL.
    region : str
        IBM COS region.
    session_provider : str
        Session provider name ("memory" or "redis").

    Returns
    -------
    dict
        The environment variables that were applied.
    """
    # HMAC credentials go through the standard AWS variable names so the
    # underlying S3-compatible client picks them up unchanged.
    settings = {
        "ARTIFACT_PROVIDER": "ibm_cos",
        "SESSION_PROVIDER": session_provider,
        "AWS_ACCESS_KEY_ID": access_key,
        "AWS_SECRET_ACCESS_KEY": secret_key,
        "AWS_REGION": region,
        "IBM_COS_ENDPOINT": endpoint,
        "ARTIFACT_BUCKET": bucket
    }
    os.environ.update(settings)
    return settings
|
184
|
+
|
185
|
+
|
186
|
+
def configure_ibm_cos_iam(
    *,
    api_key: str,
    instance_crn: str,
    bucket: str,
    endpoint: str = "https://s3.us-south.cloud-object-storage.appdomain.cloud",
    session_provider: str = "memory"
) -> Dict[str, str]:
    """
    Configure the environment for IBM Cloud Object Storage with IAM auth.

    Parameters
    ----------
    api_key : str
        IBM Cloud API key.
    instance_crn : str
        COS service instance CRN.
    bucket : str
        COS bucket name.
    endpoint : str
        IBM COS endpoint URL.
    session_provider : str
        Session provider name ("memory" or "redis").

    Returns
    -------
    dict
        The environment variables that were applied.
    """
    settings = {
        "ARTIFACT_PROVIDER": "ibm_cos_iam",
        "SESSION_PROVIDER": session_provider,
        "IBM_COS_APIKEY": api_key,
        "IBM_COS_INSTANCE_CRN": instance_crn,
        "IBM_COS_ENDPOINT": endpoint,
        "ARTIFACT_BUCKET": bucket
    }
    os.environ.update(settings)
    return settings
|
228
|
+
|
229
|
+
|
230
|
+
def create_store() -> ArtifactStore:
    """
    Build a fresh ArtifactStore from the current environment.

    The store reads all of its configuration (providers, bucket, etc.)
    from environment variables at construction time, so call one of the
    ``configure_*`` helpers first.

    Returns
    -------
    ArtifactStore
        A newly constructed, environment-configured store.
    """
    store = ArtifactStore()
    return store
|
240
|
+
|
241
|
+
|
242
|
+
# Convenience functions for common setups
|
243
|
+
def development_setup() -> ArtifactStore:
    """Configure in-memory storage and return a ready-to-use store."""
    configure_memory()
    store = create_store()
    return store
|
247
|
+
|
248
|
+
|
249
|
+
def testing_setup(artifacts_dir: str = "./test-artifacts") -> ArtifactStore:
    """Configure filesystem storage under *artifacts_dir* and return a store."""
    configure_filesystem(artifacts_dir)
    store = create_store()
    return store
|
253
|
+
|
254
|
+
|
255
|
+
def production_setup(
|
256
|
+
*,
|
257
|
+
storage_type: str,
|
258
|
+
**kwargs
|
259
|
+
) -> ArtifactStore:
|
260
|
+
"""
|
261
|
+
Set up for production use.
|
262
|
+
|
263
|
+
Parameters
|
264
|
+
----------
|
265
|
+
storage_type : str
|
266
|
+
Storage type: 's3', 'ibm_cos', 'ibm_cos_iam'
|
267
|
+
**kwargs
|
268
|
+
Configuration parameters for the chosen storage type
|
269
|
+
|
270
|
+
Returns
|
271
|
+
-------
|
272
|
+
ArtifactStore
|
273
|
+
Configured store instance
|
274
|
+
"""
|
275
|
+
if storage_type == "s3":
|
276
|
+
configure_s3(**kwargs)
|
277
|
+
elif storage_type == "ibm_cos":
|
278
|
+
configure_ibm_cos(**kwargs)
|
279
|
+
elif storage_type == "ibm_cos_iam":
|
280
|
+
configure_ibm_cos_iam(**kwargs)
|
281
|
+
else:
|
282
|
+
raise ValueError(f"Unknown storage type: {storage_type}")
|
283
|
+
|
284
|
+
return create_store()
|
285
|
+
|
286
|
+
|
287
|
+
# Usage examples in docstring
# NOTE: this runs at import time and appends the examples below to the
# module docstring, so they appear in help(chuk_artifacts.config).
__doc__ += """

Usage Examples
--------------

**Quick development setup:**
```python
from chuk_artifacts.config import development_setup

store = development_setup()  # Uses memory, no persistence
```

**Testing with filesystem:**
```python
from chuk_artifacts.config import testing_setup

store = testing_setup("./test-data")  # Persists to filesystem
```

**Production with S3:**
```python
from chuk_artifacts.config import production_setup

store = production_setup(
    storage_type="s3",
    access_key="AKIA...",
    secret_key="...",
    bucket="prod-artifacts",
    session_provider="redis"
)
```

**Custom configuration:**
```python
from chuk_artifacts.config import configure_s3, configure_redis_session, create_store

# Set up S3 storage
configure_s3(
    access_key="AKIA...",
    secret_key="...",
    bucket="my-bucket",
    endpoint_url="https://nyc3.digitaloceanspaces.com"  # DigitalOcean Spaces
)

# Set up Redis sessions
configure_redis_session("redis://localhost:6379/1")

# Create store with this configuration
store = create_store()
```
"""
|
chuk_artifacts/core.py
ADDED
@@ -0,0 +1,215 @@
|
|
1
|
+
# -*- coding: utf-8 -*-
|
2
|
+
# chuk_artifacts/core.py
|
3
|
+
"""
|
4
|
+
core storage operations.
|
5
|
+
"""
|
6
|
+
|
7
|
+
from __future__ import annotations
|
8
|
+
|
9
|
+
import uuid, hashlib, time, asyncio, logging, json
|
10
|
+
from datetime import datetime
|
11
|
+
from typing import Any, Dict, Optional, TYPE_CHECKING
|
12
|
+
|
13
|
+
if TYPE_CHECKING:
|
14
|
+
from .store import ArtifactStore
|
15
|
+
|
16
|
+
from .exceptions import (
|
17
|
+
ArtifactStoreError, ArtifactNotFoundError, ArtifactExpiredError,
|
18
|
+
ArtifactCorruptedError, ProviderError, SessionError
|
19
|
+
)
|
20
|
+
|
21
|
+
logger = logging.getLogger(__name__)
|
22
|
+
|
23
|
+
# Prefix used to synthesize a per-artifact scope when the caller supplies
# no session_id (see CoreStorageOperations.store).
_ANON_PREFIX = "anon"
# Default time-to-live for artifact metadata, in seconds (15 minutes).
_DEFAULT_TTL = 900
|
25
|
+
|
26
|
+
|
27
|
+
class CoreStorageOperations:
    """core storage operations without BaseOperations inheritance.

    Thin operations layer over an ``ArtifactStore``: it writes artifact
    bytes through the store's object-storage factory (``_s3_factory``)
    and caches a JSON metadata record through the store's session
    factory (``_session_factory``), keyed by artifact id.
    """

    def __init__(self, artifact_store: 'ArtifactStore'):
        # Attribute is called `artifact_store` (not `store`) so it does not
        # shadow the `store()` coroutine defined below.
        self.artifact_store = artifact_store  # Renamed to avoid conflicts
        logger.info(f"CoreStorageOperations initialized with store: {type(artifact_store)}")

    async def store(
        self,
        data: bytes,
        *,
        mime: str,
        summary: str,
        meta: Dict[str, Any] | None = None,
        filename: str | None = None,
        session_id: str | None = None,
        ttl: int = _DEFAULT_TTL,
    ) -> str:
        """Store artifact data with metadata.

        Writes *data* to object storage under ``sess/<scope>/<id>`` (with
        retries), then caches a JSON metadata record in the session
        provider with the same TTL.

        Parameters
        ----------
        data : bytes
            Raw artifact payload.
        mime : str
            Content type recorded on the object and in the metadata.
        summary : str
            Human-readable description stored in the metadata record.
        meta : dict, optional
            Arbitrary caller metadata (defaults to ``{}``).
        filename : str, optional
            Original filename, stored in object metadata and the record.
        session_id : str, optional
            Session scope; when omitted, an anonymous per-artifact scope
            ``anon_<artifact_id>`` is synthesized.
        ttl : int
            Metadata TTL in seconds (default ``_DEFAULT_TTL``).

        Returns
        -------
        str
            The generated 32-char hex artifact id.

        Raises
        ------
        ArtifactStoreError
            If the parent store has been closed.
        SessionError
            If the failure message mentions "session" or "redis".
        ProviderError
            For any other storage failure.
        """
        if self.artifact_store._closed:
            raise ArtifactStoreError("Store has been closed")

        start_time = time.time()
        artifact_id = uuid.uuid4().hex

        # Anonymous artifacts get their own single-use scope.
        scope = session_id or f"{_ANON_PREFIX}_{artifact_id}"
        key = f"sess/{scope}/{artifact_id}"

        try:
            # Store in object storage with retries
            await self._store_with_retry(data, key, mime, filename, scope)

            # Build metadata record
            record = {
                "scope": scope,
                "key": key,
                "mime": mime,
                "summary": summary,
                "meta": meta or {},
                "filename": filename,
                "bytes": len(data),
                # SHA256 of the payload; verified again on retrieve().
                "sha256": hashlib.sha256(data).hexdigest(),
                # NOTE(review): datetime.utcnow() is naive and deprecated in
                # Python 3.12; the "Z" suffix is appended manually here.
                "stored_at": datetime.utcnow().isoformat(timespec="seconds") + "Z",
                "ttl": ttl,
                "storage_provider": self.artifact_store._storage_provider_name,
                "session_provider": self.artifact_store._session_provider_name,
            }

            # Cache metadata using session provider
            session_ctx_mgr = self.artifact_store._session_factory()
            async with session_ctx_mgr as session:
                await session.setex(artifact_id, ttl, json.dumps(record))

            duration_ms = int((time.time() - start_time) * 1000)
            logger.info(
                "Artifact stored successfully",
                extra={
                    "artifact_id": artifact_id,
                    "bytes": len(data),
                    "mime": mime,
                    "duration_ms": duration_ms,
                    "storage_provider": self.artifact_store._storage_provider_name,
                }
            )

            return artifact_id

        except Exception as e:
            duration_ms = int((time.time() - start_time) * 1000)
            logger.error(
                "Artifact storage failed",
                extra={
                    "artifact_id": artifact_id,
                    "error": str(e),
                    "duration_ms": duration_ms,
                    "storage_provider": self.artifact_store._storage_provider_name,
                },
                exc_info=True
            )

            # Heuristic classification: failures whose message mentions the
            # session layer are re-raised as SessionError, everything else
            # as ProviderError. String matching is fragile but intentional.
            if "session" in str(e).lower() or "redis" in str(e).lower():
                raise SessionError(f"Metadata caching failed: {e}") from e
            else:
                raise ProviderError(f"Storage operation failed: {e}") from e

    async def _store_with_retry(self, data: bytes, key: str, mime: str, filename: str, scope: str) -> None:
        """Store data with retry logic.

        Attempts ``put_object`` up to ``max_retries`` times with
        exponential backoff (1s, 2s, 4s, ...); re-raises the last
        exception when all attempts fail.

        NOTE(review): if ``max_retries`` is 0 this re-raises ``None``
        (a TypeError); assumed to always be >= 1 — confirm on the store.
        """
        last_exception = None

        for attempt in range(self.artifact_store.max_retries):
            try:
                storage_ctx_mgr = self.artifact_store._s3_factory()
                async with storage_ctx_mgr as s3:
                    await s3.put_object(
                        Bucket=self.artifact_store.bucket,
                        Key=key,
                        Body=data,
                        ContentType=mime,
                        # Empty-string fallback keeps the metadata value a str.
                        Metadata={"filename": filename or "", "scope": scope},
                    )
                return  # Success

            except Exception as e:
                last_exception = e
                if attempt < self.artifact_store.max_retries - 1:
                    wait_time = 2 ** attempt  # Exponential backoff
                    logger.warning(
                        f"Storage attempt {attempt + 1} failed, retrying in {wait_time}s",
                        extra={"error": str(e), "attempt": attempt + 1}
                    )
                    await asyncio.sleep(wait_time)
                else:
                    logger.error(f"All {self.artifact_store.max_retries} storage attempts failed")

        raise last_exception

    async def retrieve(self, artifact_id: str) -> bytes:
        """Retrieve artifact data directly.

        Looks up the metadata record, fetches the object from storage,
        and verifies the payload against the recorded SHA256.

        Raises
        ------
        ArtifactStoreError
            If the parent store has been closed.
        ArtifactNotFoundError, ArtifactExpiredError
            Propagated from metadata lookup.
        ArtifactCorruptedError
            If the payload hash does not match the stored SHA256.
        ProviderError
            For any other retrieval failure.
        """
        if self.artifact_store._closed:
            raise ArtifactStoreError("Store has been closed")

        start_time = time.time()

        try:
            record = await self._get_record(artifact_id)

            storage_ctx_mgr = self.artifact_store._s3_factory()
            async with storage_ctx_mgr as s3:
                response = await s3.get_object(Bucket=self.artifact_store.bucket, Key=record["key"])

                # Handle different response formats from different providers
                if hasattr(response["Body"], "read"):
                    # aioboto3-style streaming body
                    data = await response["Body"].read()
                elif isinstance(response["Body"], bytes):
                    data = response["Body"]
                else:
                    data = bytes(response["Body"])

            # Verify integrity if SHA256 is available
            if "sha256" in record and record["sha256"]:
                computed_hash = hashlib.sha256(data).hexdigest()
                if computed_hash != record["sha256"]:
                    raise ArtifactCorruptedError(
                        f"SHA256 mismatch: expected {record['sha256']}, got {computed_hash}"
                    )

            duration_ms = int((time.time() - start_time) * 1000)
            logger.info(
                "Artifact retrieved successfully",
                extra={
                    "artifact_id": artifact_id,
                    "bytes": len(data),
                    "duration_ms": duration_ms,
                }
            )

            return data

        # Domain errors pass through untouched; only unexpected failures
        # are wrapped in ProviderError below.
        except (ArtifactNotFoundError, ArtifactExpiredError, ArtifactCorruptedError):
            raise
        except Exception as e:
            duration_ms = int((time.time() - start_time) * 1000)
            logger.error(
                "Artifact retrieval failed",
                extra={
                    "artifact_id": artifact_id,
                    "error": str(e),
                    "duration_ms": duration_ms,
                }
            )
            raise ProviderError(f"Retrieval failed: {e}") from e

    async def _get_record(self, artifact_id: str) -> Dict[str, Any]:
        """Retrieve artifact metadata from session provider.

        Returns the decoded JSON record; raises SessionError on provider
        failure, ArtifactNotFoundError when the key is absent (missing or
        expired), and ArtifactCorruptedError on undecodable JSON.
        """
        try:
            session_ctx_mgr = self.artifact_store._session_factory()
            async with session_ctx_mgr as session:
                raw = await session.get(artifact_id)
        except Exception as e:
            raise SessionError(f"Session provider error retrieving {artifact_id}: {e}") from e

        # A missing key is indistinguishable from an expired one here.
        if raw is None:
            raise ArtifactNotFoundError(f"Artifact {artifact_id} not found or expired")

        try:
            return json.loads(raw)
        except json.JSONDecodeError as e:
            logger.error(f"Corrupted metadata for artifact {artifact_id}: {e}")
            raise ArtifactCorruptedError(f"Corrupted metadata for artifact {artifact_id}") from e
|
@@ -0,0 +1,37 @@
|
|
1
|
+
# -*- coding: utf-8 -*-
|
2
|
+
# chuk_artifacts/exceptions.py
|
3
|
+
"""
|
4
|
+
Exception classes for artifact store operations.
|
5
|
+
"""
|
6
|
+
|
7
|
+
from __future__ import annotations
|
8
|
+
|
9
|
+
|
10
|
+
class ArtifactStoreError(Exception):
    """Root of the artifact-store exception hierarchy; catch this to
    handle any store-related failure."""


class ArtifactNotFoundError(ArtifactStoreError):
    """The requested artifact does not exist (or its metadata is gone)."""


class ArtifactExpiredError(ArtifactStoreError):
    """The artifact's TTL has elapsed."""


class ArtifactCorruptedError(ArtifactStoreError):
    """The artifact's metadata (or payload integrity check) is corrupted."""


class ProviderError(ArtifactStoreError):
    """The object-storage provider reported a failure."""


class SessionError(ArtifactStoreError):
    """The session/metadata provider reported a failure."""