AbstractRuntime 0.0.0__py3-none-any.whl → 0.0.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- abstractruntime/__init__.py +104 -2
- abstractruntime/core/__init__.py +19 -0
- abstractruntime/core/models.py +239 -0
- abstractruntime/core/policy.py +166 -0
- abstractruntime/core/runtime.py +581 -0
- abstractruntime/core/spec.py +53 -0
- abstractruntime/identity/__init__.py +7 -0
- abstractruntime/identity/fingerprint.py +57 -0
- abstractruntime/integrations/__init__.py +11 -0
- abstractruntime/integrations/abstractcore/__init__.py +43 -0
- abstractruntime/integrations/abstractcore/effect_handlers.py +89 -0
- abstractruntime/integrations/abstractcore/factory.py +150 -0
- abstractruntime/integrations/abstractcore/llm_client.py +296 -0
- abstractruntime/integrations/abstractcore/logging.py +27 -0
- abstractruntime/integrations/abstractcore/tool_executor.py +89 -0
- abstractruntime/scheduler/__init__.py +13 -0
- abstractruntime/scheduler/convenience.py +324 -0
- abstractruntime/scheduler/registry.py +101 -0
- abstractruntime/scheduler/scheduler.py +431 -0
- abstractruntime/storage/__init__.py +25 -0
- abstractruntime/storage/artifacts.py +488 -0
- abstractruntime/storage/base.py +107 -0
- abstractruntime/storage/in_memory.py +119 -0
- abstractruntime/storage/json_files.py +208 -0
- abstractruntime/storage/ledger_chain.py +153 -0
- abstractruntime/storage/snapshots.py +217 -0
- abstractruntime-0.0.1.dist-info/METADATA +163 -0
- abstractruntime-0.0.1.dist-info/RECORD +30 -0
- {abstractruntime-0.0.0.dist-info → abstractruntime-0.0.1.dist-info}/licenses/LICENSE +3 -1
- abstractruntime-0.0.0.dist-info/METADATA +0 -89
- abstractruntime-0.0.0.dist-info/RECORD +0 -5
- {abstractruntime-0.0.0.dist-info → abstractruntime-0.0.1.dist-info}/WHEEL +0 -0
|
@@ -0,0 +1,488 @@
|
|
|
1
|
+
"""abstractruntime.storage.artifacts
|
|
2
|
+
|
|
3
|
+
Artifact storage for large payloads.
|
|
4
|
+
|
|
5
|
+
Artifacts are stored by reference (artifact_id) instead of embedding
|
|
6
|
+
large data directly into RunState.vars. This keeps run state small
|
|
7
|
+
and JSON-serializable while supporting large payloads like:
|
|
8
|
+
- Documents and files
|
|
9
|
+
- Large LLM responses
|
|
10
|
+
- Tool outputs (search results, database queries)
|
|
11
|
+
- Media content (images, audio, video)
|
|
12
|
+
|
|
13
|
+
Design:
|
|
14
|
+
- Content-addressed: artifact_id is derived from content hash
|
|
15
|
+
- Metadata-rich: stores content_type, size, timestamps
|
|
16
|
+
- Simple interface: store/load/exists/delete
|
|
17
|
+
"""
|
|
18
|
+
|
|
19
|
+
from __future__ import annotations
|
|
20
|
+
|
|
21
|
+
import hashlib
|
|
22
|
+
import json
|
|
23
|
+
import re
|
|
24
|
+
from abc import ABC, abstractmethod
|
|
25
|
+
from dataclasses import dataclass, field, asdict
|
|
26
|
+
from datetime import datetime, timezone
|
|
27
|
+
from pathlib import Path
|
|
28
|
+
from typing import Any, Dict, List, Optional, Union
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
# Valid artifact ID pattern: alphanumeric, hyphens, underscores
|
|
32
|
+
_ARTIFACT_ID_PATTERN = re.compile(r"^[a-zA-Z0-9_-]+$")
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
def utc_now_iso() -> str:
    """Return the current UTC time as an ISO 8601 string."""
    now = datetime.now(timezone.utc)
    return now.isoformat()
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
@dataclass
class ArtifactMetadata:
    """Descriptive record for a stored artifact (content is kept separately)."""

    artifact_id: str  # content-addressed hash prefix or caller-supplied ID
    content_type: str  # MIME type or semantic type label
    size_bytes: int  # length of the stored content in bytes
    created_at: str  # ISO 8601 creation timestamp
    run_id: Optional[str] = None  # optional association with a run
    tags: Dict[str, str] = field(default_factory=dict)  # free-form key/value labels

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a plain JSON-compatible dict."""
        return asdict(self)

    @classmethod
    def from_dict(cls, data: Dict[str, Any]) -> "ArtifactMetadata":
        """Rebuild metadata from a dict produced by :meth:`to_dict`."""
        raw_tags = data.get("tags")
        return cls(
            artifact_id=data["artifact_id"],
            content_type=data["content_type"],
            size_bytes=data["size_bytes"],
            created_at=data["created_at"],
            run_id=data.get("run_id"),
            # Normalize a missing/None tags field to an empty dict.
            tags=raw_tags if raw_tags else {},
        )
|
|
63
|
+
|
|
64
|
+
|
|
65
|
+
@dataclass
class Artifact:
    """A stored payload together with its descriptive metadata."""

    metadata: ArtifactMetadata  # id, type, size, timestamps, tags
    content: bytes  # raw payload

    @property
    def artifact_id(self) -> str:
        """Shortcut for ``metadata.artifact_id``."""
        return self.metadata.artifact_id

    @property
    def content_type(self) -> str:
        """Shortcut for ``metadata.content_type``."""
        return self.metadata.content_type

    def as_text(self, encoding: str = "utf-8") -> str:
        """Return the payload decoded as text."""
        return str(self.content, encoding)

    def as_json(self) -> Any:
        """Return the payload parsed as JSON (assumes UTF-8 encoding)."""
        return json.loads(self.as_text())
|
|
87
|
+
|
|
88
|
+
|
|
89
|
+
def compute_artifact_id(content: bytes) -> str:
    """Derive a content-addressed ID: the first 32 hex chars of SHA-256."""
    digest = hashlib.sha256(content).hexdigest()
    return digest[:32]
|
|
92
|
+
|
|
93
|
+
|
|
94
|
+
def validate_artifact_id(artifact_id: str) -> None:
    """Reject artifact IDs that could escape the storage directory.

    Only alphanumerics, hyphens, and underscores are accepted, which rules
    out path separators, dots, and other traversal tricks.

    Raises:
        ValueError: If artifact_id is empty or contains invalid characters.
    """
    if not artifact_id:
        raise ValueError("artifact_id cannot be empty")
    # fullmatch of the unanchored pattern is equivalent to the anchored
    # ^[a-zA-Z0-9_-]+$ match; the re module caches the compiled pattern.
    if re.fullmatch(r"[a-zA-Z0-9_-]+", artifact_id) is None:
        raise ValueError(
            f"Invalid artifact_id '{artifact_id}': must contain only "
            "alphanumeric characters, hyphens, and underscores"
        )
|
|
107
|
+
|
|
108
|
+
|
|
109
|
+
class ArtifactStore(ABC):
    """Interface for artifact storage backends.

    Implementations persist binary payloads addressed by artifact_id and
    expose lookup, existence, deletion, and listing operations. Concrete
    text/JSON convenience helpers are layered on top of the abstract
    primitives.
    """

    @abstractmethod
    def store(
        self,
        content: bytes,
        *,
        content_type: str = "application/octet-stream",
        run_id: Optional[str] = None,
        tags: Optional[Dict[str, str]] = None,
        artifact_id: Optional[str] = None,
    ) -> ArtifactMetadata:
        """Persist content and return the resulting metadata.

        Args:
            content: Raw payload bytes.
            content_type: MIME type or semantic type label.
            run_id: Optional run to associate the artifact with.
            tags: Optional key/value labels.
            artifact_id: Explicit ID; when omitted, implementations derive
                one from the content hash (content-addressed).

        Returns:
            ArtifactMetadata carrying the artifact_id.
        """
        ...

    @abstractmethod
    def load(self, artifact_id: str) -> Optional[Artifact]:
        """Return the artifact (metadata plus content), or None if absent."""
        ...

    @abstractmethod
    def get_metadata(self, artifact_id: str) -> Optional[ArtifactMetadata]:
        """Return metadata without loading content, or None if absent."""
        ...

    @abstractmethod
    def exists(self, artifact_id: str) -> bool:
        """Return True if an artifact with this ID is stored."""
        ...

    @abstractmethod
    def delete(self, artifact_id: str) -> bool:
        """Remove an artifact. Returns True if deleted, False if not found."""
        ...

    @abstractmethod
    def list_by_run(self, run_id: str) -> List[ArtifactMetadata]:
        """Return metadata for every artifact associated with run_id."""
        ...

    @abstractmethod
    def list_all(self, *, limit: int = 1000) -> List[ArtifactMetadata]:
        """Return metadata for stored artifacts, at most ``limit`` entries."""
        ...

    def delete_by_run(self, run_id: str) -> int:
        """Delete every artifact associated with a run.

        Args:
            run_id: The run ID.

        Returns:
            Number of artifacts actually deleted.
        """
        deleted = 0
        for meta in self.list_by_run(run_id):
            if self.delete(meta.artifact_id):
                deleted += 1
        return deleted

    # --- convenience wrappers over store()/load() ---

    def store_text(
        self,
        text: str,
        *,
        content_type: str = "text/plain",
        encoding: str = "utf-8",
        run_id: Optional[str] = None,
        tags: Optional[Dict[str, str]] = None,
    ) -> ArtifactMetadata:
        """Encode a text payload and store it."""
        payload = text.encode(encoding)
        return self.store(payload, content_type=content_type, run_id=run_id, tags=tags)

    def store_json(
        self,
        data: Any,
        *,
        run_id: Optional[str] = None,
        tags: Optional[Dict[str, str]] = None,
    ) -> ArtifactMetadata:
        """Serialize data compactly as JSON and store it."""
        payload = json.dumps(data, ensure_ascii=False, separators=(",", ":"))
        return self.store(
            payload.encode("utf-8"),
            content_type="application/json",
            run_id=run_id,
            tags=tags,
        )

    def load_text(self, artifact_id: str, encoding: str = "utf-8") -> Optional[str]:
        """Load an artifact decoded as text; None if absent."""
        artifact = self.load(artifact_id)
        return None if artifact is None else artifact.as_text(encoding)

    def load_json(self, artifact_id: str) -> Optional[Any]:
        """Load an artifact parsed as JSON; None if absent."""
        artifact = self.load(artifact_id)
        return None if artifact is None else artifact.as_json()
|
|
272
|
+
|
|
273
|
+
|
|
274
|
+
class InMemoryArtifactStore(ArtifactStore):
    """Dict-backed artifact store intended for testing and development."""

    def __init__(self) -> None:
        # artifact_id -> Artifact (metadata and content kept together)
        self._artifacts: Dict[str, Artifact] = {}

    def store(
        self,
        content: bytes,
        *,
        content_type: str = "application/octet-stream",
        run_id: Optional[str] = None,
        tags: Optional[Dict[str, str]] = None,
        artifact_id: Optional[str] = None,
    ) -> ArtifactMetadata:
        """Persist content in memory and return its metadata."""
        key = artifact_id if artifact_id is not None else compute_artifact_id(content)

        meta = ArtifactMetadata(
            artifact_id=key,
            content_type=content_type,
            size_bytes=len(content),
            created_at=utc_now_iso(),
            run_id=run_id,
            tags=tags or {},
        )
        self._artifacts[key] = Artifact(metadata=meta, content=content)
        return meta

    def load(self, artifact_id: str) -> Optional[Artifact]:
        """Return the stored artifact, or None if unknown."""
        return self._artifacts.get(artifact_id)

    def get_metadata(self, artifact_id: str) -> Optional[ArtifactMetadata]:
        """Return metadata only; None if the artifact is unknown."""
        found = self._artifacts.get(artifact_id)
        return None if found is None else found.metadata

    def exists(self, artifact_id: str) -> bool:
        """True if the ID is currently stored."""
        return artifact_id in self._artifacts

    def delete(self, artifact_id: str) -> bool:
        """Remove the artifact; True if it was present."""
        return self._artifacts.pop(artifact_id, None) is not None

    def list_by_run(self, run_id: str) -> List[ArtifactMetadata]:
        """Metadata of every artifact whose run_id matches."""
        return [
            item.metadata
            for item in self._artifacts.values()
            if item.metadata.run_id == run_id
        ]

    def list_all(self, *, limit: int = 1000) -> List[ArtifactMetadata]:
        """Up to ``limit`` metadata records, newest first by created_at."""
        ordered = sorted(
            (item.metadata for item in self._artifacts.values()),
            key=lambda m: m.created_at,
            reverse=True,
        )
        return ordered[:limit]
|
|
334
|
+
|
|
335
|
+
|
|
336
|
+
class FileArtifactStore(ArtifactStore):
    """File-based artifact store.

    Directory structure:
        base_dir/
            artifacts/
                {artifact_id}.bin   # content
                {artifact_id}.meta  # metadata JSON

    Artifact IDs are validated before being used in file names, which
    blocks path traversal via crafted IDs.
    """

    def __init__(self, base_dir: Union[str, Path]) -> None:
        """Create the store rooted at base_dir; directories are created eagerly."""
        self._base = Path(base_dir)
        self._artifacts_dir = self._base / "artifacts"
        self._artifacts_dir.mkdir(parents=True, exist_ok=True)

    def _content_path(self, artifact_id: str) -> Path:
        """Return the content file path (validates the ID first)."""
        validate_artifact_id(artifact_id)
        return self._artifacts_dir / f"{artifact_id}.bin"

    def _metadata_path(self, artifact_id: str) -> Path:
        """Return the metadata file path (validates the ID first)."""
        validate_artifact_id(artifact_id)
        return self._artifacts_dir / f"{artifact_id}.meta"

    def store(
        self,
        content: bytes,
        *,
        content_type: str = "application/octet-stream",
        run_id: Optional[str] = None,
        tags: Optional[Dict[str, str]] = None,
        artifact_id: Optional[str] = None,
    ) -> ArtifactMetadata:
        """Write content and metadata files and return the metadata.

        Content is written before metadata, so a crash between the two
        writes leaves an artifact that load()/exists() treat as absent.
        """
        if artifact_id is None:
            artifact_id = compute_artifact_id(content)

        metadata = ArtifactMetadata(
            artifact_id=artifact_id,
            content_type=content_type,
            size_bytes=len(content),
            created_at=utc_now_iso(),
            run_id=run_id,
            tags=tags or {},
        )

        # Content first, then metadata: readers require both files.
        self._content_path(artifact_id).write_bytes(content)
        metadata_json = json.dumps(metadata.to_dict(), ensure_ascii=False, indent=2)
        self._metadata_path(artifact_id).write_text(metadata_json, encoding="utf-8")

        return metadata

    def load(self, artifact_id: str) -> Optional[Artifact]:
        """Load content plus metadata; None unless BOTH files are present."""
        content_path = self._content_path(artifact_id)
        metadata_path = self._metadata_path(artifact_id)

        if not content_path.exists() or not metadata_path.exists():
            return None

        content = content_path.read_bytes()
        metadata = ArtifactMetadata.from_dict(
            json.loads(metadata_path.read_text(encoding="utf-8"))
        )
        return Artifact(metadata=metadata, content=content)

    def get_metadata(self, artifact_id: str) -> Optional[ArtifactMetadata]:
        """Read metadata only (the content file is not touched); None if absent."""
        metadata_path = self._metadata_path(artifact_id)
        if not metadata_path.exists():
            return None
        return ArtifactMetadata.from_dict(
            json.loads(metadata_path.read_text(encoding="utf-8"))
        )

    def exists(self, artifact_id: str) -> bool:
        """True only when BOTH the content and metadata files exist.

        Bug fix: previously only the content file was checked, so exists()
        could return True for an artifact that load() reports as absent
        (e.g. after a crash between the two writes in store()).
        """
        return (
            self._content_path(artifact_id).exists()
            and self._metadata_path(artifact_id).exists()
        )

    def delete(self, artifact_id: str) -> bool:
        """Remove both files; True if either of them existed."""
        deleted = False
        for path in (self._content_path(artifact_id), self._metadata_path(artifact_id)):
            if path.exists():
                path.unlink()
                deleted = True
        return deleted

    def _iter_metadata(self):
        """Yield ArtifactMetadata for every readable .meta file.

        Corrupt, unreadable, or schema-incomplete files are skipped
        (best-effort listing, matching the previous behavior for
        JSON/IO errors and additionally tolerating missing keys).
        """
        for metadata_path in self._artifacts_dir.glob("*.meta"):
            try:
                with open(metadata_path, "r", encoding="utf-8") as f:
                    yield ArtifactMetadata.from_dict(json.load(f))
            except (json.JSONDecodeError, OSError, KeyError):
                continue

    def list_by_run(self, run_id: str) -> List[ArtifactMetadata]:
        """Metadata for all artifacts associated with run_id."""
        return [meta for meta in self._iter_metadata() if meta.run_id == run_id]

    def list_all(self, *, limit: int = 1000) -> List[ArtifactMetadata]:
        """Up to ``limit`` metadata records, newest first by created_at."""
        results = list(self._iter_metadata())
        results.sort(key=lambda m: m.created_at, reverse=True)
        return results[:limit]
|
|
460
|
+
|
|
461
|
+
|
|
462
|
+
# Artifact reference helpers for use in RunState.vars
|
|
463
|
+
|
|
464
|
+
def artifact_ref(artifact_id: str) -> Dict[str, str]:
    """Build a small reference dict pointing at a stored artifact.

    Store this in vars instead of the payload itself:

        metadata = artifact_store.store_json(large_data)
        run.vars["result"] = artifact_ref(metadata.artifact_id)
    """
    ref = {"$artifact": artifact_id}
    return ref
|
|
472
|
+
|
|
473
|
+
|
|
474
|
+
def is_artifact_ref(value: Any) -> bool:
    """Return True when value looks like an artifact reference dict."""
    if not isinstance(value, dict):
        return False
    return "$artifact" in value
|
|
477
|
+
|
|
478
|
+
|
|
479
|
+
def get_artifact_id(ref: Dict[str, str]) -> str:
    """Return the artifact ID held by a reference dict.

    Raises:
        KeyError: If ref is not an artifact reference.
    """
    artifact_id = ref["$artifact"]
    return artifact_id
|
|
482
|
+
|
|
483
|
+
|
|
484
|
+
def resolve_artifact(ref: Dict[str, str], store: ArtifactStore) -> Optional[Artifact]:
    """Load the artifact that a reference points at.

    Returns None when ref is not an artifact reference; also returns None
    when the store has no artifact under the referenced ID.
    """
    # Inlined is_artifact_ref()/get_artifact_id() for a dependency-free check.
    if isinstance(ref, dict) and "$artifact" in ref:
        return store.load(ref["$artifact"])
    return None
|
|
@@ -0,0 +1,107 @@
|
|
|
1
|
+
"""abstractruntime.storage.base
|
|
2
|
+
|
|
3
|
+
Storage interfaces (durability backends).
|
|
4
|
+
|
|
5
|
+
These are intentionally minimal for v0.1.
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
from __future__ import annotations
|
|
9
|
+
|
|
10
|
+
from abc import ABC, abstractmethod
|
|
11
|
+
from typing import Any, Dict, List, Optional, Protocol, runtime_checkable
|
|
12
|
+
|
|
13
|
+
from ..core.models import RunState, RunStatus, StepRecord, WaitReason
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
class RunStore(ABC):
    """Durable storage for run state, keyed by run_id."""

    @abstractmethod
    def save(self, run: RunState) -> None:
        """Persist the given run state."""
        ...

    @abstractmethod
    def load(self, run_id: str) -> Optional[RunState]:
        """Return the previously saved run state for run_id, or None if absent."""
        ...
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
@runtime_checkable
class QueryableRunStore(Protocol):
    """Extended interface for querying runs.

    This is a Protocol (structural typing) so existing RunStore implementations
    can add these methods without changing their inheritance.

    Used by:
    - Scheduler/driver loops (find due wait_until runs)
    - Operational tooling (list waiting runs)
    - UI backoffice views (runs by status)
    """

    def list_runs(
        self,
        *,
        status: Optional[RunStatus] = None,
        wait_reason: Optional[WaitReason] = None,
        workflow_id: Optional[str] = None,
        limit: int = 100,
    ) -> List[RunState]:
        """List runs matching the given filters.

        All filters are optional and combined with AND semantics.

        Args:
            status: Filter by run status (RUNNING, WAITING, COMPLETED, FAILED)
            wait_reason: Filter by wait reason (only applies to WAITING runs)
            workflow_id: Filter by workflow ID
            limit: Maximum number of runs to return

        Returns:
            List of matching RunState objects, ordered by updated_at descending
        """
        ...

    def list_due_wait_until(
        self,
        *,
        now_iso: str,
        limit: int = 100,
    ) -> List[RunState]:
        """List runs waiting for a time threshold that has passed.

        This finds runs where:
        - status == WAITING
        - waiting.reason == UNTIL
        - waiting.until <= now_iso

        NOTE: implementations may compare the ISO strings lexicographically,
        which assumes uniformly formatted timestamps in the same (UTC) offset.

        Args:
            now_iso: Current time as ISO 8601 string
            limit: Maximum number of runs to return

        Returns:
            List of due RunState objects, ordered by waiting.until ascending
        """
        ...

    def list_children(
        self,
        *,
        parent_run_id: str,
        status: Optional[RunStatus] = None,
    ) -> List[RunState]:
        """List child runs of a parent.

        Args:
            parent_run_id: The parent run ID
            status: Optional filter by status

        Returns:
            List of child RunState objects (ordering unspecified by this protocol)
        """
        ...
|
|
96
|
+
|
|
97
|
+
|
|
98
|
+
class LedgerStore(ABC):
    """Append-only journal store."""

    @abstractmethod
    def append(self, record: StepRecord) -> None:
        """Append a step record to the run's journal; existing entries are never rewritten."""
        ...

    @abstractmethod
    def list(self, run_id: str) -> List[Dict[str, Any]]:
        """Return the journal entries for run_id as plain dicts."""
        ...
|
|
106
|
+
|
|
107
|
+
|
|
@@ -0,0 +1,119 @@
|
|
|
1
|
+
"""abstractruntime.storage.in_memory
|
|
2
|
+
|
|
3
|
+
In-memory durability backends (testing/dev).
|
|
4
|
+
"""
|
|
5
|
+
|
|
6
|
+
from __future__ import annotations
|
|
7
|
+
|
|
8
|
+
from dataclasses import asdict
|
|
9
|
+
from typing import Any, Dict, List, Optional
|
|
10
|
+
|
|
11
|
+
from .base import LedgerStore, RunStore
|
|
12
|
+
from ..core.models import RunState, RunStatus, StepRecord, WaitReason
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
class InMemoryRunStore(RunStore):
    """In-memory run store with query support.

    Implements both RunStore (ABC) and QueryableRunStore (Protocol).
    """

    def __init__(self):
        self._runs: Dict[str, RunState] = {}

    def save(self, run: RunState) -> None:
        # NOTE(review): the run is stored by reference, not copied — mutating
        # a RunState after save() also mutates the stored snapshot.
        self._runs[run.run_id] = run

    def load(self, run_id: str) -> Optional[RunState]:
        return self._runs.get(run_id)

    # --- QueryableRunStore methods ---

    def list_runs(
        self,
        *,
        status: Optional[RunStatus] = None,
        wait_reason: Optional[WaitReason] = None,
        workflow_id: Optional[str] = None,
        limit: int = 100,
    ) -> List[RunState]:
        """List runs matching the given filters, most recently updated first."""

        def _matches(run: RunState) -> bool:
            # All supplied filters must hold (AND semantics).
            if status is not None and run.status != status:
                return False
            if workflow_id is not None and run.workflow_id != workflow_id:
                return False
            if wait_reason is not None and (
                run.waiting is None or run.waiting.reason != wait_reason
            ):
                return False
            return True

        selected = [run for run in self._runs.values() if _matches(run)]
        selected.sort(key=lambda r: r.updated_at or "", reverse=True)
        return selected[:limit]

    def list_due_wait_until(
        self,
        *,
        now_iso: str,
        limit: int = 100,
    ) -> List[RunState]:
        """List WAITING/UNTIL runs whose deadline has passed, earliest first."""
        due = [
            run
            for run in self._runs.values()
            if run.status == RunStatus.WAITING
            and run.waiting is not None
            and run.waiting.reason == WaitReason.UNTIL
            and run.waiting.until is not None
            # Lexicographic ISO comparison works for uniformly formatted UTC stamps.
            and run.waiting.until <= now_iso
        ]
        due.sort(key=lambda r: r.waiting.until if r.waiting else "")
        return due[:limit]

    def list_children(
        self,
        *,
        parent_run_id: str,
        status: Optional[RunStatus] = None,
    ) -> List[RunState]:
        """List child runs of a parent, optionally filtered by status."""
        return [
            run
            for run in self._runs.values()
            if run.parent_run_id == parent_run_id
            and (status is None or run.status == status)
        ]
|
|
107
|
+
|
|
108
|
+
|
|
109
|
+
class InMemoryLedgerStore(LedgerStore):
    """Dict-of-lists ledger; records are serialized to plain dicts on append."""

    def __init__(self):
        # run_id -> list of step-record dicts, in append order
        self._records: Dict[str, List[Dict[str, Any]]] = {}

    def append(self, record: StepRecord) -> None:
        """Append record (converted to a dict) to its run's journal."""
        bucket = self._records.get(record.run_id)
        if bucket is None:
            bucket = []
            self._records[record.run_id] = bucket
        bucket.append(asdict(record))

    def list(self, run_id: str) -> List[Dict[str, Any]]:
        """Return a copy of the journal for run_id (empty if unknown)."""
        return [*self._records.get(run_id, [])]
|
|
118
|
+
|
|
119
|
+
|