chuk-artifacts 0.1.2__py3-none-any.whl → 0.1.4__py3-none-any.whl

This diff covers publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.
chuk_artifacts/store.py CHANGED
@@ -1,12 +1,19 @@
  # -*- coding: utf-8 -*-
- # chuk_artifacts/store.py (ENHANCED)
+ # chuk_artifacts/store.py
  """
- Asynchronous, object-store-backed artefact manager with MCP server support.
+ Clean ArtifactStore with mandatory sessions and grid architecture.
+
+ Grid Architecture:
+ - Mandatory session allocation (no anonymous artifacts)
+ - Grid paths: grid/{sandbox_id}/{session_id}/{artifact_id}
+ - Clean, focused implementation
+ - Now uses chuk_sessions for session management
  """

  from __future__ import annotations

- import os, logging
+ import os, logging, uuid
+ from datetime import datetime
  from typing import Any, Dict, List, Callable, AsyncContextManager, Optional, Union

  try:
@@ -25,7 +32,10 @@ except ImportError:
      logger.debug("python-dotenv not available, skipping .env file loading")

  # Import exceptions
- from .exceptions import ArtifactStoreError
+ from .exceptions import ArtifactStoreError, ProviderError
+
+ # Import chuk_sessions instead of local session manager
+ from chuk_sessions.session_manager import SessionManager

  # Configure structured logging
  logger = logging.getLogger(__name__)
@@ -51,104 +61,73 @@ def _default_session_factory() -> Callable[[], AsyncContextManager]:
  # ─────────────────────────────────────────────────────────────────────
  class ArtifactStore:
      """
-     Asynchronous artifact storage with MCP server support.
+     Clean ArtifactStore with grid architecture and mandatory sessions.

-     Enhanced with MCP-specific operations for file management within sessions.
+     Simple rules:
+     - Always allocate a session (no anonymous artifacts)
+     - Grid paths only: grid/{sandbox_id}/{session_id}/{artifact_id}
+     - Clean, focused implementation
+     - Uses chuk_sessions for session management
      """

      def __init__(
          self,
          *,
          bucket: Optional[str] = None,
-         s3_factory: Optional[Callable[[], AsyncContextManager]] = None,
          storage_provider: Optional[str] = None,
-         session_factory: Optional[Callable[[], AsyncContextManager]] = None,
          session_provider: Optional[str] = None,
+         sandbox_id: Optional[str] = None,
+         session_ttl_hours: int = 24,
          max_retries: int = 3,
-         # Backward compatibility - deprecated but still supported
-         redis_url: Optional[str] = None,
-         provider: Optional[str] = None,
      ):
-         # Read from environment variables with memory as defaults
-         bucket = bucket or os.getenv("ARTIFACT_BUCKET", "mcp-bucket")
+         # Configuration
+         self.bucket = bucket or os.getenv("ARTIFACT_BUCKET", "artifacts")
+         self.sandbox_id = sandbox_id or self._detect_sandbox_id()
+         self.session_ttl_hours = session_ttl_hours
+         self.max_retries = max_retries
+         self._closed = False
+
+         # Storage provider
          storage_provider = storage_provider or os.getenv("ARTIFACT_PROVIDER", "memory")
+         self._s3_factory = self._load_storage_provider(storage_provider)
+         self._storage_provider_name = storage_provider
+
+         # Session provider
          session_provider = session_provider or os.getenv("SESSION_PROVIDER", "memory")
+         self._session_factory = self._load_session_provider(session_provider)
+         self._session_provider_name = session_provider

-         # Handle backward compatibility
-         if redis_url is not None:
-             import warnings
-             warnings.warn(
-                 "redis_url parameter is deprecated. Use session_provider='redis' "
-                 "and set SESSION_REDIS_URL environment variable instead.",
-                 DeprecationWarning,
-                 stacklevel=2
-             )
-             os.environ["SESSION_REDIS_URL"] = redis_url
-             session_provider = "redis"
-
-         if provider is not None:
-             import warnings
-             warnings.warn(
-                 "provider parameter is deprecated. Use storage_provider instead.",
-                 DeprecationWarning,
-                 stacklevel=2
-             )
-             storage_provider = provider
-
-         # Validate factory/provider combinations
-         if s3_factory and storage_provider:
-             raise ValueError("Specify either s3_factory or storage_provider—not both")
-         if session_factory and session_provider:
-             raise ValueError("Specify either session_factory or session_provider—not both")
-
-         # Initialize storage factory
-         if s3_factory:
-             self._s3_factory = s3_factory
-         elif storage_provider:
-             self._s3_factory = self._load_storage_provider(storage_provider)
-         else:
-             self._s3_factory = _default_storage_factory()
-
-         # Initialize session factory
-         if session_factory:
-             self._session_factory = session_factory
-         elif session_provider:
-             self._session_factory = self._load_session_provider(session_provider)
-         else:
-             self._session_factory = _default_session_factory()
-
-         self.bucket = bucket
-         self.max_retries = max_retries
-         self._storage_provider_name = storage_provider or "memory"
-         self._session_provider_name = session_provider or "memory"
-         self._closed = False
-
-         # Initialize operation modules
-         from .core import CoreStorageOperations
-         from .presigned import PresignedURLOperations
-         from .metadata import MetadataOperations
-         from .batch import BatchOperations
-         from .admin import AdminOperations
-         from .session_operations import SessionOperations
+         # Session manager (now using chuk_sessions)
+         self._session_manager = SessionManager(
+             sandbox_id=self.sandbox_id,
+             default_ttl_hours=session_ttl_hours,
+         )
+
+         # Operation modules
+         from .core import CoreStorageOperations as CoreOps
+         from .metadata import MetadataOperations as MetaOps
+         from .presigned import PresignedURLOperations as PresignedOps
+         from .batch import BatchOperations as BatchOps
+         from .admin import AdminOperations as AdminOps
+
+         self._core = CoreOps(self)
+         self._metadata = MetaOps(self)
+         self._presigned = PresignedOps(self)
+         self._batch = BatchOps(self)
+         self._admin = AdminOps(self)

-         self._core = CoreStorageOperations(self)
-         self._presigned = PresignedURLOperations(self)
-         self._metadata = MetadataOperations(self)
-         self._batch = BatchOperations(self)
-         self._admin = AdminOperations(self)
-         self._session = SessionOperations(self)
-
          logger.info(
-             "ArtifactStore initialized with session operations support",
+             "ArtifactStore initialized",
              extra={
-                 "bucket": bucket,
-                 "storage_provider": self._storage_provider_name,
-                 "session_provider": self._session_provider_name,
+                 "bucket": self.bucket,
+                 "sandbox_id": self.sandbox_id,
+                 "storage_provider": storage_provider,
+                 "session_provider": session_provider,
              }
          )

      # ─────────────────────────────────────────────────────────────────
-     # Core storage operations (delegated to CoreStorageOperations)
+     # Core operations
      # ─────────────────────────────────────────────────────────────────

      async def store(
@@ -160,11 +139,19 @@ class ArtifactStore:
          meta: Dict[str, Any] | None = None,
          filename: str | None = None,
          session_id: str | None = None,
+         user_id: str | None = None,
          ttl: int = _DEFAULT_TTL,
      ) -> str:
-         """Store artifact data with metadata."""
+         """Store artifact with mandatory session allocation."""
+         # Always allocate/validate session using chuk_sessions
+         session_id = await self._session_manager.allocate_session(
+             session_id=session_id,
+             user_id=user_id,
+         )
+
+         # Store using core operations
          return await self._core.store(
-             data,
+             data=data,
              mime=mime,
              summary=summary,
              meta=meta,
@@ -174,11 +161,260 @@ class ArtifactStore:
          )

      async def retrieve(self, artifact_id: str) -> bytes:
-         """Retrieve artifact data directly."""
+         """Retrieve artifact data."""
          return await self._core.retrieve(artifact_id)

+     async def metadata(self, artifact_id: str) -> Dict[str, Any]:
+         """Get artifact metadata."""
+         return await self._metadata.get_metadata(artifact_id)
+
+     async def exists(self, artifact_id: str) -> bool:
+         """Check if artifact exists."""
+         return await self._metadata.exists(artifact_id)
+
+     async def delete(self, artifact_id: str) -> bool:
+         """Delete artifact."""
+         return await self._metadata.delete(artifact_id)
+
+     async def list_by_session(self, session_id: str, limit: int = 100) -> List[Dict[str, Any]]:
+         """List artifacts in session."""
+         return await self._metadata.list_by_session(session_id, limit)
+
      # ─────────────────────────────────────────────────────────────────
-     # Presigned URL operations (delegated to PresignedURLOperations)
+     # Session operations - now delegated to chuk_sessions
+     # ─────────────────────────────────────────────────────────────────
+
+     async def create_session(
+         self,
+         user_id: Optional[str] = None,
+         ttl_hours: Optional[int] = None,
+         custom_metadata: Optional[Dict[str, Any]] = None,
+     ) -> str:
+         """Create a new session."""
+         return await self._session_manager.allocate_session(
+             user_id=user_id,
+             ttl_hours=ttl_hours,
+             custom_metadata=custom_metadata,
+         )
+
+     async def validate_session(self, session_id: str) -> bool:
+         """Validate session."""
+         return await self._session_manager.validate_session(session_id)
+
+     async def get_session_info(self, session_id: str) -> Optional[Dict[str, Any]]:
+         """Get session information."""
+         return await self._session_manager.get_session_info(session_id)
+
+     async def update_session_metadata(
+         self,
+         session_id: str,
+         metadata: Dict[str, Any]
+     ) -> bool:
+         """Update session metadata."""
+         return await self._session_manager.update_session_metadata(session_id, metadata)
+
+     async def extend_session_ttl(
+         self,
+         session_id: str,
+         additional_hours: int
+     ) -> bool:
+         """Extend session TTL."""
+         return await self._session_manager.extend_session_ttl(session_id, additional_hours)
+
+     async def delete_session(self, session_id: str) -> bool:
+         """Delete session."""
+         return await self._session_manager.delete_session(session_id)
+
+     async def cleanup_expired_sessions(self) -> int:
+         """Clean up expired sessions."""
+         return await self._session_manager.cleanup_expired_sessions()
+
+     # ─────────────────────────────────────────────────────────────────
+     # Grid operations - now delegated to chuk_sessions
+     # ─────────────────────────────────────────────────────────────────
+
+     def get_canonical_prefix(self, session_id: str) -> str:
+         """Get grid path prefix for session."""
+         return self._session_manager.get_canonical_prefix(session_id)
+
+     def generate_artifact_key(self, session_id: str, artifact_id: str) -> str:
+         """Generate grid artifact key."""
+         return self._session_manager.generate_artifact_key(session_id, artifact_id)
+
+     def parse_grid_key(self, grid_key: str) -> Optional[Dict[str, Any]]:
+         """Parse grid key to extract components."""
+         return self._session_manager.parse_grid_key(grid_key)
+
+     def get_session_prefix_pattern(self) -> str:
+         """Get session prefix pattern for this sandbox."""
+         return self._session_manager.get_session_prefix_pattern()
+
+     # ─────────────────────────────────────────────────────────────────
+     # File operations
+     # ─────────────────────────────────────────────────────────────────
+
+     async def write_file(
+         self,
+         content: Union[str, bytes],
+         *,
+         filename: str,
+         mime: str = "text/plain",
+         summary: str = "",
+         session_id: str = None,
+         user_id: str = None,
+         meta: Dict[str, Any] = None,
+         encoding: str = "utf-8",
+     ) -> str:
+         """Write content to file."""
+         if isinstance(content, str):
+             data = content.encode(encoding)
+         else:
+             data = content
+
+         return await self.store(
+             data=data,
+             mime=mime,
+             summary=summary or f"File: {filename}",
+             filename=filename,
+             session_id=session_id,
+             user_id=user_id,
+             meta=meta,
+         )
+
+     async def read_file(
+         self,
+         artifact_id: str,
+         *,
+         encoding: str = "utf-8",
+         as_text: bool = True
+     ) -> Union[str, bytes]:
+         """Read file content."""
+         data = await self.retrieve(artifact_id)
+
+         if as_text:
+             return data.decode(encoding)
+         return data
+
+     async def list_files(
+         self,
+         session_id: str,
+         prefix: str = "",
+         limit: int = 100
+     ) -> List[Dict[str, Any]]:
+         """List files in session with optional prefix filter."""
+         return await self._metadata.list_by_prefix(session_id, prefix, limit)
+
+     async def get_directory_contents(
+         self,
+         session_id: str,
+         directory_prefix: str = "",
+         limit: int = 100
+     ) -> List[Dict[str, Any]]:
+         """
+         List files in a directory-like structure within a session.
+         """
+         try:
+             return await self._metadata.list_by_prefix(session_id, directory_prefix, limit)
+         except Exception as e:
+             logger.error(
+                 "Directory listing failed for session %s: %s",
+                 session_id,
+                 str(e),
+                 extra={
+                     "session_id": session_id,
+                     "directory_prefix": directory_prefix,
+                     "operation": "get_directory_contents"
+                 }
+             )
+             raise ProviderError(f"Directory listing failed: {e}") from e
+
+     async def copy_file(
+         self,
+         artifact_id: str,
+         *,
+         new_filename: str = None,
+         target_session_id: str = None,
+         new_meta: Dict[str, Any] = None,
+         summary: str = None
+     ) -> str:
+         """Copy a file WITHIN THE SAME SESSION only (security enforced)."""
+         # Get original metadata to check session
+         original_meta = await self.metadata(artifact_id)
+         original_session = original_meta.get("session_id")
+
+         # STRICT SECURITY: Block ALL cross-session copies
+         if target_session_id and target_session_id != original_session:
+             raise ArtifactStoreError(
+                 f"Cross-session copies are not permitted for security reasons. "
+                 f"Artifact {artifact_id} belongs to session '{original_session}', "
+                 f"cannot copy to session '{target_session_id}'. Files can only be "
+                 f"copied within the same session."
+             )
+
+         # Get original data
+         original_data = await self.retrieve(artifact_id)
+
+         # Prepare copy metadata
+         copy_filename = new_filename or (
+             (original_meta.get("filename", "file") or "file") + "_copy"
+         )
+         copy_summary = summary or f"Copy of {original_meta.get('summary', 'artifact')}"
+
+         # Merge metadata
+         copy_meta = {**original_meta.get("meta", {})}
+         if new_meta:
+             copy_meta.update(new_meta)
+
+         # Add copy tracking
+         copy_meta["copied_from"] = artifact_id
+         copy_meta["copy_timestamp"] = datetime.utcnow().isoformat() + "Z"
+
+         # Store the copy in the same session
+         return await self.store(
+             data=original_data,
+             mime=original_meta["mime"],
+             summary=copy_summary,
+             filename=copy_filename,
+             session_id=original_session,  # Always same session
+             meta=copy_meta
+         )
+
+     async def move_file(
+         self,
+         artifact_id: str,
+         *,
+         new_filename: str = None,
+         new_session_id: str = None,
+         new_meta: Dict[str, Any] = None
+     ) -> Dict[str, Any]:
+         """Move/rename a file WITHIN THE SAME SESSION only (security enforced)."""
+         # Get current metadata
+         record = await self.metadata(artifact_id)
+         current_session = record.get("session_id")
+
+         # STRICT SECURITY: Block ALL cross-session moves
+         if new_session_id and new_session_id != current_session:
+             raise ArtifactStoreError(
+                 f"Cross-session moves are not permitted for security reasons. "
+                 f"Artifact {artifact_id} belongs to session '{current_session}', "
+                 f"cannot move to session '{new_session_id}'. Use copy operations within "
+                 f"the same session only."
+             )
+
+         # For now, just simulate a move by updating metadata
+         # A full implementation would update the metadata record
+         if new_filename:
+             # This is a simplified move - just return updated record
+             record["filename"] = new_filename
+         if new_meta:
+             existing_meta = record.get("meta", {})
+             existing_meta.update(new_meta)
+             record["meta"] = existing_meta
+
+         return record
+
+     # ─────────────────────────────────────────────────────────────────
+     # Presigned URL operations
      # ─────────────────────────────────────────────────────────────────

      async def presign(self, artifact_id: str, expires: int = _DEFAULT_PRESIGN_EXPIRES) -> str:
@@ -205,12 +441,7 @@ class ArtifactStore:
          expires: int = _DEFAULT_PRESIGN_EXPIRES
      ) -> tuple[str, str]:
          """Generate a presigned URL for uploading a new artifact."""
-         return await self._presigned.presign_upload(
-             session_id=session_id,
-             filename=filename,
-             mime_type=mime_type,
-             expires=expires
-         )
+         return await self._presigned.presign_upload(session_id, filename, mime_type, expires)

      async def register_uploaded_artifact(
          self,
@@ -257,63 +488,7 @@ class ArtifactStore:
          )

      # ─────────────────────────────────────────────────────────────────
-     # Metadata operations (delegated to MetadataOperations)
-     # ─────────────────────────────────────────────────────────────────
-
-     async def metadata(self, artifact_id: str) -> Dict[str, Any]:
-         """Get artifact metadata."""
-         return await self._metadata.metadata(artifact_id)
-
-     async def exists(self, artifact_id: str) -> bool:
-         """Check if artifact exists and hasn't expired."""
-         return await self._metadata.exists(artifact_id)
-
-     async def delete(self, artifact_id: str) -> bool:
-         """Delete artifact and its metadata."""
-         return await self._metadata.delete(artifact_id)
-
-     async def update_metadata(
-         self,
-         artifact_id: str,
-         *,
-         summary: str = None,
-         meta: Dict[str, Any] = None,
-         filename: str = None,
-         ttl: int = None,
-         # NEW: MCP-specific parameters
-         new_meta: Dict[str, Any] = None,
-         merge: bool = True
-     ) -> Dict[str, Any]:
-         """Update artifact metadata with MCP server compatibility."""
-         return await self._metadata.update_metadata(
-             artifact_id,
-             summary=summary,
-             meta=meta,
-             filename=filename,
-             ttl=ttl,
-             new_meta=new_meta,
-             merge=merge
-         )
-
-     async def extend_ttl(self, artifact_id: str, additional_seconds: int) -> Dict[str, Any]:
-         """Extend the TTL of an artifact's metadata."""
-         return await self._metadata.extend_ttl(artifact_id, additional_seconds)
-
-     async def list_by_session(self, session_id: str, limit: int = 100) -> List[Dict[str, Any]]:
-         """List artifacts for a specific session."""
-         return await self._metadata.list_by_session(session_id, limit)
-
-     async def list_by_prefix(
-         self,
-         session_id: str,
-         prefix: str = "",
-         limit: int = 100
-     ) -> List[Dict[str, Any]]:
-         """List artifacts in a session with filename prefix filtering."""
-         return await self._metadata.list_by_prefix(session_id, prefix, limit)
-
-     # ─────────────────────────────────────────────────────────────────
-     # Batch operations (delegated to BatchOperations)
+     # Batch operations
      # ─────────────────────────────────────────────────────────────────

      async def store_batch(
@@ -326,156 +501,107 @@ class ArtifactStore:
          return await self._batch.store_batch(items, session_id, ttl)

      # ─────────────────────────────────────────────────────────────────
-     # Administrative operations (delegated to AdminOperations)
-     # ─────────────────────────────────────────────────────────────────
-
-     async def validate_configuration(self) -> Dict[str, Any]:
-         """Validate store configuration and connectivity."""
-         return await self._admin.validate_configuration()
-
-     async def get_stats(self) -> Dict[str, Any]:
-         """Get storage statistics."""
-         return await self._admin.get_stats()
-
-     # ─────────────────────────────────────────────────────────────────
-     # Session-based file operations (delegated to SessionOperations)
+     # Metadata operations
      # ─────────────────────────────────────────────────────────────────

-     async def move_file(
+     async def update_metadata(
          self,
          artifact_id: str,
          *,
-         new_filename: str = None,
-         new_session_id: str = None,
-         new_meta: Dict[str, Any] = None
+         summary: str = None,
+         meta: Dict[str, Any] = None,
+         merge: bool = True,
+         **kwargs
      ) -> Dict[str, Any]:
-         """Move a file within sessions or rename it."""
-         return await self._session.move_file(
-             artifact_id,
-             new_filename=new_filename,
-             new_session_id=new_session_id,
-             new_meta=new_meta
-         )
-
-     async def copy_file(
-         self,
-         artifact_id: str,
-         *,
-         new_filename: str = None,
-         target_session_id: str = None,
-         new_meta: Dict[str, Any] = None,
-         summary: str = None
-     ) -> str:
-         """Copy a file within or across sessions."""
-         return await self._session.copy_file(
-             artifact_id,
-             new_filename=new_filename,
-             target_session_id=target_session_id,
-             new_meta=new_meta,
-             summary=summary
-         )
-
-     async def read_file(
-         self,
-         artifact_id: str,
-         *,
-         encoding: str = "utf-8",
-         as_text: bool = True
-     ) -> Union[str, bytes]:
-         """Read file content directly."""
-         return await self._session.read_file(
+         """Update artifact metadata."""
+         return await self._metadata.update_metadata(
              artifact_id,
-             encoding=encoding,
-             as_text=as_text
-         )
-
-     async def write_file(
-         self,
-         content: Union[str, bytes],
-         *,
-         filename: str,
-         mime: str = "text/plain",
-         summary: str = "",
-         session_id: str = None,
-         meta: Dict[str, Any] = None,
-         encoding: str = "utf-8",
-         overwrite_artifact_id: str = None
-     ) -> str:
-         """Write content to a new file or overwrite existing."""
-         return await self._session.write_file(
-             content,
-             filename=filename,
-             mime=mime,
              summary=summary,
-             session_id=session_id,
              meta=meta,
-             encoding=encoding,
-             overwrite_artifact_id=overwrite_artifact_id
+             merge=merge,
+             **kwargs
          )

-     async def get_directory_contents(
+     async def extend_ttl(
          self,
-         session_id: str,
-         directory_prefix: str = "",
-         limit: int = 100
-     ) -> List[Dict[str, Any]]:
-         """List files in a directory-like structure within a session."""
-         return await self._session.get_directory_contents(
-             session_id,
-             directory_prefix,
-             limit
-         )
+         artifact_id: str,
+         additional_seconds: int
+     ) -> Dict[str, Any]:
+         """Extend artifact TTL."""
+         return await self._metadata.extend_ttl(artifact_id, additional_seconds)

      # ─────────────────────────────────────────────────────────────────
-     # Resource management
+     # Administrative operations
      # ─────────────────────────────────────────────────────────────────

-     async def close(self):
-         """Mark store as closed."""
-         if not self._closed:
-             self._closed = True
-             logger.info("ArtifactStore closed")
-
-     async def __aenter__(self):
-         return self
+     async def validate_configuration(self) -> Dict[str, Any]:
+         """Validate store configuration and connectivity."""
+         return await self._admin.validate_configuration()

-     async def __aexit__(self, exc_type, exc_val, exc_tb):
-         await self.close()
+     async def get_stats(self) -> Dict[str, Any]:
+         """Get storage statistics."""
+         stats = await self._admin.get_stats()
+
+         # Add session manager stats
+         session_stats = self._session_manager.get_cache_stats()
+         stats["session_manager"] = session_stats
+
+         return stats

      # ─────────────────────────────────────────────────────────────────
-     # Helper functions (still needed for provider loading)
+     # Helpers
      # ─────────────────────────────────────────────────────────────────

+     def _detect_sandbox_id(self) -> str:
+         """Auto-detect sandbox ID."""
+         candidates = [
+             os.getenv("ARTIFACT_SANDBOX_ID"),
+             os.getenv("SANDBOX_ID"),
+             os.getenv("HOSTNAME"),
+         ]
+
+         for candidate in candidates:
+             if candidate:
+                 clean_id = "".join(c for c in candidate if c.isalnum() or c in "-_")[:32]
+                 if clean_id:
+                     return clean_id
+
+         # Generate fallback
+         return f"sandbox-{uuid.uuid4().hex[:8]}"
+
      def _load_storage_provider(self, name: str) -> Callable[[], AsyncContextManager]:
-         """Load storage provider by name."""
+         """Load storage provider."""
          from importlib import import_module
-
+
          try:
              mod = import_module(f"chuk_artifacts.providers.{name}")
+             return mod.factory()
          except ModuleNotFoundError as exc:
-             available = ["memory", "filesystem", "s3", "ibm_cos", "ibm_cos_iam"]
-             raise ValueError(
-                 f"Unknown storage provider '{name}'. "
-                 f"Available providers: {', '.join(available)}"
-             ) from exc
-
-         if not hasattr(mod, "factory"):
-             raise AttributeError(f"Storage provider '{name}' lacks factory()")
-
-         logger.info(f"Loaded storage provider: {name}")
-         return mod.factory()
+             available = ["memory", "filesystem", "s3", "ibm_cos"]
+             raise ValueError(f"Unknown storage provider '{name}'. Available: {', '.join(available)}") from exc

      def _load_session_provider(self, name: str) -> Callable[[], AsyncContextManager]:
-         """Load session provider by name."""
+         """Load session provider."""
          from importlib import import_module
-
+
          try:
              mod = import_module(f"chuk_sessions.providers.{name}")
+             return mod.factory()
          except ModuleNotFoundError as exc:
              raise ValueError(f"Unknown session provider '{name}'") from exc

-         if not hasattr(mod, "factory"):
-             raise AttributeError(f"Session provider '{name}' lacks factory()")
-
-         logger.info(f"Loaded session provider: {name}")
-         return mod.factory()
+     # ─────────────────────────────────────────────────────────────────
+     # Resource management
+     # ─────────────────────────────────────────────────────────────────
+
+     async def close(self):
+         """Close the store."""
+         if not self._closed:
+             self._closed = True
+             logger.info("ArtifactStore closed")
+
+     async def __aenter__(self):
+         return self
+
+     async def __aexit__(self, exc_type, exc_val, exc_tb):
+         await self.close()
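
For orientation, here is a minimal usage sketch of the 0.1.4 API added in this diff. It is not taken from the package's documentation and is untested; it is written only against the method signatures shown above and assumes the default memory storage and session providers, and that the metadata record exposes a "session_id" field (as copy_file's use of it suggests).

    import asyncio
    from chuk_artifacts.store import ArtifactStore

    async def main():
        # Defaults resolve to the memory providers via ARTIFACT_PROVIDER / SESSION_PROVIDER.
        async with ArtifactStore(bucket="artifacts", sandbox_id="demo") as store:
            # write_file() delegates to store(), which allocates a session when none is given.
            artifact_id = await store.write_file(
                "hello grid",
                filename="notes/hello.txt",
                summary="example file",
            )
            meta = await store.metadata(artifact_id)
            session_id = meta["session_id"]  # assumption: metadata records carry session_id
            # Grid keys follow grid/{sandbox_id}/{session_id}/{artifact_id}.
            print(store.generate_artifact_key(session_id, artifact_id))
            print(await store.read_file(artifact_id))
            print(await store.list_by_session(session_id))

    asyncio.run(main())

The sketch exercises the new grid-path helpers and the mandatory session allocation introduced in 0.1.4; cross-session copy_file/move_file calls would raise ArtifactStoreError, per the checks in the diff.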