AbstractRuntime 0.0.0__py3-none-any.whl → 0.2.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- abstractruntime/__init__.py +104 -2
- abstractruntime/core/__init__.py +26 -0
- abstractruntime/core/config.py +101 -0
- abstractruntime/core/models.py +282 -0
- abstractruntime/core/policy.py +166 -0
- abstractruntime/core/runtime.py +736 -0
- abstractruntime/core/spec.py +53 -0
- abstractruntime/core/vars.py +94 -0
- abstractruntime/identity/__init__.py +7 -0
- abstractruntime/identity/fingerprint.py +57 -0
- abstractruntime/integrations/__init__.py +11 -0
- abstractruntime/integrations/abstractcore/__init__.py +47 -0
- abstractruntime/integrations/abstractcore/effect_handlers.py +119 -0
- abstractruntime/integrations/abstractcore/factory.py +187 -0
- abstractruntime/integrations/abstractcore/llm_client.py +397 -0
- abstractruntime/integrations/abstractcore/logging.py +27 -0
- abstractruntime/integrations/abstractcore/tool_executor.py +168 -0
- abstractruntime/scheduler/__init__.py +13 -0
- abstractruntime/scheduler/convenience.py +324 -0
- abstractruntime/scheduler/registry.py +101 -0
- abstractruntime/scheduler/scheduler.py +431 -0
- abstractruntime/storage/__init__.py +25 -0
- abstractruntime/storage/artifacts.py +519 -0
- abstractruntime/storage/base.py +107 -0
- abstractruntime/storage/in_memory.py +119 -0
- abstractruntime/storage/json_files.py +208 -0
- abstractruntime/storage/ledger_chain.py +153 -0
- abstractruntime/storage/snapshots.py +217 -0
- abstractruntime-0.2.0.dist-info/METADATA +163 -0
- abstractruntime-0.2.0.dist-info/RECORD +32 -0
- {abstractruntime-0.0.0.dist-info → abstractruntime-0.2.0.dist-info}/licenses/LICENSE +3 -1
- abstractruntime-0.0.0.dist-info/METADATA +0 -89
- abstractruntime-0.0.0.dist-info/RECORD +0 -5
- {abstractruntime-0.0.0.dist-info → abstractruntime-0.2.0.dist-info}/WHEEL +0 -0
|
@@ -0,0 +1,519 @@
|
|
|
1
|
+
"""abstractruntime.storage.artifacts
|
|
2
|
+
|
|
3
|
+
Artifact storage for large payloads.
|
|
4
|
+
|
|
5
|
+
Artifacts are stored by reference (artifact_id) instead of embedding
|
|
6
|
+
large data directly into RunState.vars. This keeps run state small
|
|
7
|
+
and JSON-serializable while supporting large payloads like:
|
|
8
|
+
- Documents and files
|
|
9
|
+
- Large LLM responses
|
|
10
|
+
- Tool outputs (search results, database queries)
|
|
11
|
+
- Media content (images, audio, video)
|
|
12
|
+
|
|
13
|
+
Design:
|
|
14
|
+
- Content-addressed: artifact_id is derived from content hash
|
|
15
|
+
- Metadata-rich: stores content_type, size, timestamps
|
|
16
|
+
- Simple interface: store/load/exists/delete
|
|
17
|
+
"""
|
|
18
|
+
|
|
19
|
+
from __future__ import annotations
|
|
20
|
+
|
|
21
|
+
import hashlib
|
|
22
|
+
import json
|
|
23
|
+
import re
|
|
24
|
+
from abc import ABC, abstractmethod
|
|
25
|
+
from dataclasses import dataclass, field, asdict
|
|
26
|
+
from datetime import datetime, timezone
|
|
27
|
+
from pathlib import Path
|
|
28
|
+
from typing import Any, Dict, List, Optional, Union
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
# Valid artifact ID pattern: alphanumeric, hyphens, underscores
|
|
32
|
+
_ARTIFACT_ID_PATTERN = re.compile(r"^[a-zA-Z0-9_-]+$")
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
def utc_now_iso() -> str:
    """Return the current UTC time as an ISO 8601 string."""
    now = datetime.now(timezone.utc)
    return now.isoformat()
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
@dataclass
class ArtifactMetadata:
    """Descriptive record for a stored artifact (content lives elsewhere).

    Attributes:
        artifact_id: Unique identifier (typically a content hash).
        content_type: MIME type or semantic type of the content.
        size_bytes: Content length in bytes.
        created_at: ISO 8601 creation timestamp.
        run_id: Optional run this artifact is associated with.
        tags: Free-form key/value labels.
    """

    artifact_id: str
    content_type: str
    size_bytes: int
    created_at: str
    run_id: Optional[str] = None
    tags: Dict[str, str] = field(default_factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a plain, JSON-friendly dict."""
        return asdict(self)

    @classmethod
    def from_dict(cls, data: Dict[str, Any]) -> "ArtifactMetadata":
        """Rebuild metadata from a dict produced by :meth:`to_dict`.

        Missing ``run_id`` becomes None; a missing or null ``tags`` entry
        becomes an empty dict.
        """
        tags = data.get("tags") or {}
        return cls(
            artifact_id=data["artifact_id"],
            content_type=data["content_type"],
            size_bytes=data["size_bytes"],
            created_at=data["created_at"],
            run_id=data.get("run_id"),
            tags=tags,
        )
|
|
63
|
+
|
|
64
|
+
|
|
65
|
+
@dataclass
class Artifact:
    """A stored payload (raw bytes) together with its metadata."""

    metadata: ArtifactMetadata
    content: bytes

    @property
    def artifact_id(self) -> str:
        """Shortcut for ``metadata.artifact_id``."""
        return self.metadata.artifact_id

    @property
    def content_type(self) -> str:
        """Shortcut for ``metadata.content_type``."""
        return self.metadata.content_type

    def as_text(self, encoding: str = "utf-8") -> str:
        """Return the content decoded as text."""
        return self.content.decode(encoding)

    def as_json(self) -> Any:
        """Return the content parsed as JSON (UTF-8 encoded)."""
        text = self.content.decode("utf-8")
        return json.loads(text)
|
|
87
|
+
|
|
88
|
+
|
|
89
|
+
def compute_artifact_id(content: bytes) -> str:
    """Derive a content-addressed ID: the first 32 hex chars of SHA-256(content)."""
    digest = hashlib.sha256(content).hexdigest()
    return digest[:32]
|
|
92
|
+
|
|
93
|
+
|
|
94
|
+
def validate_artifact_id(artifact_id: str) -> None:
    """Validate an artifact ID to prevent path traversal attacks.

    IDs are used directly as filename stems (see the file-backed store), so
    only ``[a-zA-Z0-9_-]`` characters are allowed.

    Args:
        artifact_id: The ID to check.

    Raises:
        ValueError: If artifact_id is empty or contains invalid characters.
    """
    if not artifact_id:
        raise ValueError("artifact_id cannot be empty")
    # Use fullmatch, not match: with a "$"-anchored pattern, re.match accepts
    # a trailing newline (e.g. "abc\n"), which would leak into filenames.
    if not re.fullmatch(r"[a-zA-Z0-9_-]+", artifact_id):
        raise ValueError(
            f"Invalid artifact_id '{artifact_id}': must contain only "
            "alphanumeric characters, hyphens, and underscores"
        )
|
|
107
|
+
|
|
108
|
+
|
|
109
|
+
class ArtifactStore(ABC):
    """Interface for artifact storage backends.

    Subclasses implement the primitive operations (store / load / metadata /
    exists / delete and the two listing methods); the remaining methods are
    derived helpers built on top of those primitives.
    """

    @abstractmethod
    def store(
        self,
        content: bytes,
        *,
        content_type: str = "application/octet-stream",
        run_id: Optional[str] = None,
        tags: Optional[Dict[str, str]] = None,
        artifact_id: Optional[str] = None,
    ) -> ArtifactMetadata:
        """Persist *content* and return its metadata.

        Args:
            content: Raw bytes to store.
            content_type: MIME type or semantic type.
            run_id: Optional run to associate the artifact with.
            tags: Optional key/value labels.
            artifact_id: Explicit ID; defaults to the content hash.

        Returns:
            ArtifactMetadata describing the stored artifact.
        """
        ...

    @abstractmethod
    def load(self, artifact_id: str) -> Optional[Artifact]:
        """Return the artifact for *artifact_id*, or None if absent."""
        ...

    @abstractmethod
    def get_metadata(self, artifact_id: str) -> Optional[ArtifactMetadata]:
        """Return metadata only (no content), or None if absent."""
        ...

    @abstractmethod
    def exists(self, artifact_id: str) -> bool:
        """Return True if the artifact is present."""
        ...

    @abstractmethod
    def delete(self, artifact_id: str) -> bool:
        """Remove the artifact; return True if it existed, False otherwise."""
        ...

    @abstractmethod
    def list_by_run(self, run_id: str) -> List[ArtifactMetadata]:
        """Return metadata for every artifact associated with *run_id*."""
        ...

    @abstractmethod
    def list_all(self, *, limit: int = 1000) -> List[ArtifactMetadata]:
        """Return metadata for up to *limit* artifacts."""
        ...

    def delete_by_run(self, run_id: str) -> int:
        """Delete every artifact associated with *run_id*.

        Returns:
            The number of artifacts actually deleted.
        """
        return sum(
            1 for meta in self.list_by_run(run_id) if self.delete(meta.artifact_id)
        )

    def search(
        self,
        *,
        run_id: Optional[str] = None,
        content_type: Optional[str] = None,
        tags: Optional[Dict[str, str]] = None,
        limit: int = 1000,
    ) -> List[ArtifactMetadata]:
        """Filter artifacts by simple metadata fields.

        This is a plain *metadata filter*, not semantic search; embedding-based
        retrieval belongs in AbstractMemory or higher-level components.
        """
        pool = (
            list(self.list_by_run(run_id))
            if run_id is not None
            else list(self.list_all(limit=limit))
        )

        def _keep(meta: ArtifactMetadata) -> bool:
            # Every provided filter must match for the entry to survive.
            if content_type is not None and meta.content_type != content_type:
                return False
            if tags:
                owned = meta.tags or {}
                return all(owned.get(key) == value for key, value in tags.items())
            return True

        matched = [meta for meta in pool if _keep(meta)]
        matched.sort(key=lambda meta: meta.created_at, reverse=True)
        return matched[:limit]

    # Convenience methods

    def store_text(
        self,
        text: str,
        *,
        content_type: str = "text/plain",
        encoding: str = "utf-8",
        run_id: Optional[str] = None,
        tags: Optional[Dict[str, str]] = None,
    ) -> ArtifactMetadata:
        """Encode *text* and store it."""
        payload = text.encode(encoding)
        return self.store(payload, content_type=content_type, run_id=run_id, tags=tags)

    def store_json(
        self,
        data: Any,
        *,
        run_id: Optional[str] = None,
        tags: Optional[Dict[str, str]] = None,
    ) -> ArtifactMetadata:
        """Serialize *data* as compact JSON and store it."""
        payload = json.dumps(data, ensure_ascii=False, separators=(",", ":")).encode("utf-8")
        return self.store(payload, content_type="application/json", run_id=run_id, tags=tags)

    def load_text(self, artifact_id: str, encoding: str = "utf-8") -> Optional[str]:
        """Load an artifact and decode it as text; None if absent."""
        artifact = self.load(artifact_id)
        return None if artifact is None else artifact.as_text(encoding)

    def load_json(self, artifact_id: str) -> Optional[Any]:
        """Load an artifact and parse it as JSON; None if absent."""
        artifact = self.load(artifact_id)
        return None if artifact is None else artifact.as_json()
|
|
303
|
+
|
|
304
|
+
|
|
305
|
+
class InMemoryArtifactStore(ArtifactStore):
    """Dict-backed artifact store; suitable for tests and development."""

    def __init__(self) -> None:
        # artifact_id -> Artifact (metadata + content)
        self._artifacts: Dict[str, Artifact] = {}

    def store(
        self,
        content: bytes,
        *,
        content_type: str = "application/octet-stream",
        run_id: Optional[str] = None,
        tags: Optional[Dict[str, str]] = None,
        artifact_id: Optional[str] = None,
    ) -> ArtifactMetadata:
        """Store *content* in memory under its (possibly derived) ID."""
        key = artifact_id if artifact_id is not None else compute_artifact_id(content)
        metadata = ArtifactMetadata(
            artifact_id=key,
            content_type=content_type,
            size_bytes=len(content),
            created_at=utc_now_iso(),
            run_id=run_id,
            tags=tags or {},
        )
        self._artifacts[key] = Artifact(metadata=metadata, content=content)
        return metadata

    def load(self, artifact_id: str) -> Optional[Artifact]:
        """Return the stored artifact, or None if unknown."""
        return self._artifacts.get(artifact_id)

    def get_metadata(self, artifact_id: str) -> Optional[ArtifactMetadata]:
        """Return metadata without content, or None if unknown."""
        artifact = self._artifacts.get(artifact_id)
        return None if artifact is None else artifact.metadata

    def exists(self, artifact_id: str) -> bool:
        """True if the ID is present."""
        return artifact_id in self._artifacts

    def delete(self, artifact_id: str) -> bool:
        """Remove the artifact; True if it was present."""
        return self._artifacts.pop(artifact_id, None) is not None

    def list_by_run(self, run_id: str) -> List[ArtifactMetadata]:
        """Metadata for artifacts associated with *run_id* (insertion order)."""
        return [
            artifact.metadata
            for artifact in self._artifacts.values()
            if artifact.metadata.run_id == run_id
        ]

    def list_all(self, *, limit: int = 1000) -> List[ArtifactMetadata]:
        """Up to *limit* artifacts, newest first by created_at."""
        everything = sorted(
            (artifact.metadata for artifact in self._artifacts.values()),
            key=lambda meta: meta.created_at,
            reverse=True,
        )
        return everything[:limit]
|
|
365
|
+
|
|
366
|
+
|
|
367
|
+
class FileArtifactStore(ArtifactStore):
    """Artifact store backed by the local filesystem.

    Layout::

        base_dir/
          artifacts/
            {artifact_id}.bin   # raw content
            {artifact_id}.meta  # metadata as JSON
    """

    def __init__(self, base_dir: Union[str, Path]) -> None:
        self._base = Path(base_dir)
        self._artifacts_dir = self._base / "artifacts"
        self._artifacts_dir.mkdir(parents=True, exist_ok=True)

    def _content_path(self, artifact_id: str) -> Path:
        # Validation guards against path traversal via crafted IDs.
        validate_artifact_id(artifact_id)
        return self._artifacts_dir / f"{artifact_id}.bin"

    def _metadata_path(self, artifact_id: str) -> Path:
        validate_artifact_id(artifact_id)
        return self._artifacts_dir / f"{artifact_id}.meta"

    def store(
        self,
        content: bytes,
        *,
        content_type: str = "application/octet-stream",
        run_id: Optional[str] = None,
        tags: Optional[Dict[str, str]] = None,
        artifact_id: Optional[str] = None,
    ) -> ArtifactMetadata:
        """Write the content and metadata files; return the metadata."""
        key = artifact_id if artifact_id is not None else compute_artifact_id(content)
        metadata = ArtifactMetadata(
            artifact_id=key,
            content_type=content_type,
            size_bytes=len(content),
            created_at=utc_now_iso(),
            run_id=run_id,
            tags=tags or {},
        )
        self._content_path(key).write_bytes(content)
        serialized = json.dumps(metadata.to_dict(), ensure_ascii=False, indent=2)
        self._metadata_path(key).write_text(serialized, encoding="utf-8")
        return metadata

    def load(self, artifact_id: str) -> Optional[Artifact]:
        """Read content + metadata; None unless BOTH files exist."""
        content_path = self._content_path(artifact_id)
        metadata_path = self._metadata_path(artifact_id)
        if not (content_path.exists() and metadata_path.exists()):
            return None
        content = content_path.read_bytes()
        metadata_dict = json.loads(metadata_path.read_text(encoding="utf-8"))
        return Artifact(metadata=ArtifactMetadata.from_dict(metadata_dict), content=content)

    def get_metadata(self, artifact_id: str) -> Optional[ArtifactMetadata]:
        """Read metadata only; None if the .meta file is missing."""
        metadata_path = self._metadata_path(artifact_id)
        if not metadata_path.exists():
            return None
        metadata_dict = json.loads(metadata_path.read_text(encoding="utf-8"))
        return ArtifactMetadata.from_dict(metadata_dict)

    def exists(self, artifact_id: str) -> bool:
        """True if the content file exists (the .meta file is not checked)."""
        return self._content_path(artifact_id).exists()

    def delete(self, artifact_id: str) -> bool:
        """Remove content and metadata files; True if either existed."""
        removed = False
        for path in (self._content_path(artifact_id), self._metadata_path(artifact_id)):
            if path.exists():
                path.unlink()
                removed = True
        return removed

    def _iter_metadata_dicts(self):
        """Yield the raw metadata dict of every readable .meta file,
        silently skipping unreadable or malformed ones."""
        for metadata_path in self._artifacts_dir.glob("*.meta"):
            try:
                yield json.loads(metadata_path.read_text(encoding="utf-8"))
            except (json.JSONDecodeError, IOError):
                continue

    def list_by_run(self, run_id: str) -> List[ArtifactMetadata]:
        """Metadata for artifacts whose run_id matches (directory order)."""
        return [
            ArtifactMetadata.from_dict(d)
            for d in self._iter_metadata_dicts()
            if d.get("run_id") == run_id
        ]

    def list_all(self, *, limit: int = 1000) -> List[ArtifactMetadata]:
        """Up to *limit* artifacts, newest first by created_at."""
        results = [ArtifactMetadata.from_dict(d) for d in self._iter_metadata_dicts()]
        results.sort(key=lambda meta: meta.created_at, reverse=True)
        return results[:limit]
|
|
491
|
+
|
|
492
|
+
|
|
493
|
+
# Artifact reference helpers for use in RunState.vars
|
|
494
|
+
|
|
495
|
+
def artifact_ref(artifact_id: str) -> Dict[str, str]:
    """Build the small reference dict that stands in for an artifact in vars.

    Usage:
        metadata = artifact_store.store_json(large_data)
        run.vars["result"] = artifact_ref(metadata.artifact_id)
    """
    ref: Dict[str, str] = {"$artifact": artifact_id}
    return ref
|
|
503
|
+
|
|
504
|
+
|
|
505
|
+
def is_artifact_ref(value: Any) -> bool:
    """Return True if *value* looks like an artifact reference dict."""
    if not isinstance(value, dict):
        return False
    return "$artifact" in value
|
|
508
|
+
|
|
509
|
+
|
|
510
|
+
def get_artifact_id(ref: Dict[str, str]) -> str:
    """Return the artifact ID carried by a reference dict.

    Raises:
        KeyError: If *ref* has no "$artifact" entry.
    """
    artifact_id = ref["$artifact"]
    return artifact_id
|
|
513
|
+
|
|
514
|
+
|
|
515
|
+
def resolve_artifact(ref: Dict[str, str], store: ArtifactStore) -> Optional[Artifact]:
    """Look up the artifact a reference points at.

    Returns None when *ref* is not an artifact reference; otherwise delegates
    to ``store.load`` (which may itself return None for a missing artifact).
    """
    if is_artifact_ref(ref):
        return store.load(get_artifact_id(ref))
    return None
|
|
@@ -0,0 +1,107 @@
|
|
|
1
|
+
"""abstractruntime.storage.base
|
|
2
|
+
|
|
3
|
+
Storage interfaces (durability backends).
|
|
4
|
+
|
|
5
|
+
These are intentionally minimal for v0.1.
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
from __future__ import annotations
|
|
9
|
+
|
|
10
|
+
from abc import ABC, abstractmethod
|
|
11
|
+
from typing import Any, Dict, List, Optional, Protocol, runtime_checkable
|
|
12
|
+
|
|
13
|
+
from ..core.models import RunState, RunStatus, StepRecord, WaitReason
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
class RunStore(ABC):
    """Durable storage of run state, keyed by run ID."""

    @abstractmethod
    def save(self, run: RunState) -> None:
        """Persist *run*."""
        ...

    @abstractmethod
    def load(self, run_id: str) -> Optional[RunState]:
        """Return the stored run for *run_id*, or None if unknown."""
        ...
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
@runtime_checkable
class QueryableRunStore(Protocol):
    """Structural (Protocol) interface for run stores that support querying.

    Because this is structural typing, existing RunStore implementations gain
    the capability simply by defining these methods — no inheritance change.

    Consumers include:
    - Scheduler/driver loops (finding due wait_until runs)
    - Operational tooling (listing waiting runs)
    - UI backoffice views (runs by status)
    """

    def list_runs(
        self,
        *,
        status: Optional[RunStatus] = None,
        wait_reason: Optional[WaitReason] = None,
        workflow_id: Optional[str] = None,
        limit: int = 100,
    ) -> List[RunState]:
        """Return runs matching the given filters.

        Args:
            status: Keep only runs with this status (RUNNING, WAITING,
                COMPLETED, FAILED).
            wait_reason: Keep only runs waiting for this reason (applies to
                WAITING runs only).
            workflow_id: Keep only runs of this workflow.
            limit: Maximum number of results.

        Returns:
            Matching RunState objects, ordered by updated_at descending.
        """
        ...

    def list_due_wait_until(
        self,
        *,
        now_iso: str,
        limit: int = 100,
    ) -> List[RunState]:
        """Return runs whose time-based wait has expired.

        A run is due when:
        - status == WAITING
        - waiting.reason == UNTIL
        - waiting.until <= now_iso

        Args:
            now_iso: Current time as an ISO 8601 string.
            limit: Maximum number of results.

        Returns:
            Due RunState objects, ordered by waiting.until ascending.
        """
        ...

    def list_children(
        self,
        *,
        parent_run_id: str,
        status: Optional[RunStatus] = None,
    ) -> List[RunState]:
        """Return child runs of a parent.

        Args:
            parent_run_id: The parent run ID.
            status: Optional status filter.

        Returns:
            Matching child RunState objects.
        """
        ...
|
|
96
|
+
|
|
97
|
+
|
|
98
|
+
class LedgerStore(ABC):
    """Append-only journal store of step records, queryable per run."""

    @abstractmethod
    def append(self, record: StepRecord) -> None:
        """Add *record* to the journal."""
        ...

    @abstractmethod
    def list(self, run_id: str) -> List[Dict[str, Any]]:
        """Return the journal entries recorded for *run_id*.

        Note: the method name intentionally mirrors the store vocabulary even
        though it shadows the ``list`` builtin inside this class body.
        """
        ...
|
|
106
|
+
|
|
107
|
+
|