alma-memory 0.5.0-py3-none-any.whl → 0.7.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- alma/__init__.py +296 -194
- alma/compression/__init__.py +33 -0
- alma/compression/pipeline.py +980 -0
- alma/confidence/__init__.py +47 -47
- alma/confidence/engine.py +540 -540
- alma/confidence/types.py +351 -351
- alma/config/loader.py +157 -157
- alma/consolidation/__init__.py +23 -23
- alma/consolidation/engine.py +678 -678
- alma/consolidation/prompts.py +84 -84
- alma/core.py +1189 -322
- alma/domains/__init__.py +30 -30
- alma/domains/factory.py +359 -359
- alma/domains/schemas.py +448 -448
- alma/domains/types.py +272 -272
- alma/events/__init__.py +75 -75
- alma/events/emitter.py +285 -284
- alma/events/storage_mixin.py +246 -246
- alma/events/types.py +126 -126
- alma/events/webhook.py +425 -425
- alma/exceptions.py +49 -49
- alma/extraction/__init__.py +31 -31
- alma/extraction/auto_learner.py +265 -264
- alma/extraction/extractor.py +420 -420
- alma/graph/__init__.py +106 -81
- alma/graph/backends/__init__.py +32 -18
- alma/graph/backends/kuzu.py +624 -0
- alma/graph/backends/memgraph.py +432 -0
- alma/graph/backends/memory.py +236 -236
- alma/graph/backends/neo4j.py +417 -417
- alma/graph/base.py +159 -159
- alma/graph/extraction.py +198 -198
- alma/graph/store.py +860 -860
- alma/harness/__init__.py +35 -35
- alma/harness/base.py +386 -386
- alma/harness/domains.py +705 -705
- alma/initializer/__init__.py +37 -37
- alma/initializer/initializer.py +418 -418
- alma/initializer/types.py +250 -250
- alma/integration/__init__.py +62 -62
- alma/integration/claude_agents.py +444 -432
- alma/integration/helena.py +423 -423
- alma/integration/victor.py +471 -471
- alma/learning/__init__.py +101 -86
- alma/learning/decay.py +878 -0
- alma/learning/forgetting.py +1446 -1446
- alma/learning/heuristic_extractor.py +390 -390
- alma/learning/protocols.py +374 -374
- alma/learning/validation.py +346 -346
- alma/mcp/__init__.py +123 -45
- alma/mcp/__main__.py +156 -156
- alma/mcp/resources.py +122 -122
- alma/mcp/server.py +955 -591
- alma/mcp/tools.py +3254 -511
- alma/observability/__init__.py +91 -0
- alma/observability/config.py +302 -0
- alma/observability/guidelines.py +170 -0
- alma/observability/logging.py +424 -0
- alma/observability/metrics.py +583 -0
- alma/observability/tracing.py +440 -0
- alma/progress/__init__.py +21 -21
- alma/progress/tracker.py +607 -607
- alma/progress/types.py +250 -250
- alma/retrieval/__init__.py +134 -53
- alma/retrieval/budget.py +525 -0
- alma/retrieval/cache.py +1304 -1061
- alma/retrieval/embeddings.py +202 -202
- alma/retrieval/engine.py +850 -366
- alma/retrieval/modes.py +365 -0
- alma/retrieval/progressive.py +560 -0
- alma/retrieval/scoring.py +344 -344
- alma/retrieval/trust_scoring.py +637 -0
- alma/retrieval/verification.py +797 -0
- alma/session/__init__.py +19 -19
- alma/session/manager.py +442 -399
- alma/session/types.py +288 -288
- alma/storage/__init__.py +101 -61
- alma/storage/archive.py +233 -0
- alma/storage/azure_cosmos.py +1259 -1048
- alma/storage/base.py +1083 -525
- alma/storage/chroma.py +1443 -1443
- alma/storage/constants.py +103 -0
- alma/storage/file_based.py +614 -619
- alma/storage/migrations/__init__.py +21 -0
- alma/storage/migrations/base.py +321 -0
- alma/storage/migrations/runner.py +323 -0
- alma/storage/migrations/version_stores.py +337 -0
- alma/storage/migrations/versions/__init__.py +11 -0
- alma/storage/migrations/versions/v1_0_0.py +373 -0
- alma/storage/migrations/versions/v1_1_0_workflow_context.py +551 -0
- alma/storage/pinecone.py +1080 -1080
- alma/storage/postgresql.py +1948 -1452
- alma/storage/qdrant.py +1306 -1306
- alma/storage/sqlite_local.py +3041 -1358
- alma/testing/__init__.py +46 -0
- alma/testing/factories.py +301 -0
- alma/testing/mocks.py +389 -0
- alma/types.py +292 -264
- alma/utils/__init__.py +19 -0
- alma/utils/tokenizer.py +521 -0
- alma/workflow/__init__.py +83 -0
- alma/workflow/artifacts.py +170 -0
- alma/workflow/checkpoint.py +311 -0
- alma/workflow/context.py +228 -0
- alma/workflow/outcomes.py +189 -0
- alma/workflow/reducers.py +393 -0
- {alma_memory-0.5.0.dist-info → alma_memory-0.7.0.dist-info}/METADATA +244 -72
- alma_memory-0.7.0.dist-info/RECORD +112 -0
- alma_memory-0.5.0.dist-info/RECORD +0 -76
- {alma_memory-0.5.0.dist-info → alma_memory-0.7.0.dist-info}/WHEEL +0 -0
- {alma_memory-0.5.0.dist-info → alma_memory-0.7.0.dist-info}/top_level.txt +0 -0
alma/storage/base.py
CHANGED
@@ -1,525 +1,1083 @@
"""
ALMA Storage Backend Interface.

Abstract base class that all storage backends must implement.

v0.6.0 adds workflow context support:
- Checkpoint CRUD operations
- WorkflowOutcome storage and retrieval
- ArtifactRef linking
- scope_filter parameter for workflow-scoped queries
"""

from abc import ABC, abstractmethod
from datetime import datetime
from typing import TYPE_CHECKING, Any, Dict, List, Optional

from alma.types import (
    AntiPattern,
    DomainKnowledge,
    Heuristic,
    Outcome,
    UserPreference,
)

if TYPE_CHECKING:
    from alma.session import SessionHandoff
    from alma.workflow import ArtifactRef, Checkpoint, WorkflowOutcome


class StorageBackend(ABC):
    """
    Abstract base class for ALMA storage backends.

    Implementations:
    - FileBasedStorage: JSON files (testing/fallback)
    - SQLiteStorage: Local SQLite + FAISS vectors
    - AzureCosmosStorage: Production Azure Cosmos DB
    """

    # ==================== WRITE OPERATIONS ====================

    @abstractmethod
    def save_heuristic(self, heuristic: Heuristic) -> str:
        """Save a heuristic, return its ID."""
        pass

    @abstractmethod
    def save_outcome(self, outcome: Outcome) -> str:
        """Save an outcome, return its ID."""
        pass

    @abstractmethod
    def save_user_preference(self, preference: UserPreference) -> str:
        """Save a user preference, return its ID."""
        pass

    @abstractmethod
    def save_domain_knowledge(self, knowledge: DomainKnowledge) -> str:
        """Save domain knowledge, return its ID."""
        pass

    @abstractmethod
    def save_anti_pattern(self, anti_pattern: AntiPattern) -> str:
        """Save an anti-pattern, return its ID."""
        pass

    # ==================== BATCH WRITE OPERATIONS ====================

    def save_heuristics(self, heuristics: List[Heuristic]) -> List[str]:
        """Save multiple heuristics in a batch. Default implementation calls save_heuristic in a loop."""
        return [self.save_heuristic(h) for h in heuristics]

    def save_outcomes(self, outcomes: List[Outcome]) -> List[str]:
        """Save multiple outcomes in a batch. Default implementation calls save_outcome in a loop."""
        return [self.save_outcome(o) for o in outcomes]

    def save_domain_knowledge_batch(
        self, knowledge_items: List[DomainKnowledge]
    ) -> List[str]:
        """Save multiple domain knowledge items in a batch. Default implementation calls save_domain_knowledge in a loop."""
        return [self.save_domain_knowledge(k) for k in knowledge_items]

    # ==================== READ OPERATIONS ====================

    @abstractmethod
    def get_heuristics(
        self,
        project_id: str,
        agent: Optional[str] = None,
        embedding: Optional[List[float]] = None,
        top_k: int = 5,
        min_confidence: float = 0.0,
        scope_filter: Optional[Dict[str, Any]] = None,
    ) -> List[Heuristic]:
        """
        Get heuristics, optionally filtered by agent and similarity.

        Args:
            project_id: Project to query
            agent: Filter by agent name
            embedding: Query embedding for semantic search
            top_k: Max results to return
            min_confidence: Minimum confidence threshold
            scope_filter: Optional workflow scope filter (v0.6.0+)
                Keys: tenant_id, workflow_id, run_id, node_id

        Returns:
            List of matching heuristics
        """
        pass

    @abstractmethod
    def get_outcomes(
        self,
        project_id: str,
        agent: Optional[str] = None,
        task_type: Optional[str] = None,
        embedding: Optional[List[float]] = None,
        top_k: int = 5,
        success_only: bool = False,
        scope_filter: Optional[Dict[str, Any]] = None,
    ) -> List[Outcome]:
        """
        Get outcomes, optionally filtered.

        Args:
            project_id: Project to query
            agent: Filter by agent name
            task_type: Filter by task type
            embedding: Query embedding for semantic search
            top_k: Max results
            success_only: Only return successful outcomes
            scope_filter: Optional workflow scope filter (v0.6.0+)
                Keys: tenant_id, workflow_id, run_id, node_id

        Returns:
            List of matching outcomes
        """
        pass

    @abstractmethod
    def get_user_preferences(
        self,
        user_id: str,
        category: Optional[str] = None,
    ) -> List[UserPreference]:
        """
        Get user preferences.

        Args:
            user_id: User to query
            category: Optional category filter

        Returns:
            List of user preferences
        """
        pass

    @abstractmethod
    def get_domain_knowledge(
        self,
        project_id: str,
        agent: Optional[str] = None,
        domain: Optional[str] = None,
        embedding: Optional[List[float]] = None,
        top_k: int = 5,
        scope_filter: Optional[Dict[str, Any]] = None,
    ) -> List[DomainKnowledge]:
        """
        Get domain knowledge.

        Args:
            project_id: Project to query
            agent: Filter by agent
            domain: Filter by domain
            embedding: Query embedding for semantic search
            top_k: Max results
            scope_filter: Optional workflow scope filter (v0.6.0+)
                Keys: tenant_id, workflow_id, run_id, node_id

        Returns:
            List of domain knowledge items
        """
        pass

    @abstractmethod
    def get_anti_patterns(
        self,
        project_id: str,
        agent: Optional[str] = None,
        embedding: Optional[List[float]] = None,
        top_k: int = 5,
        scope_filter: Optional[Dict[str, Any]] = None,
    ) -> List[AntiPattern]:
        """
        Get anti-patterns.

        Args:
            project_id: Project to query
            agent: Filter by agent
            embedding: Query embedding for semantic search
            top_k: Max results
            scope_filter: Optional workflow scope filter (v0.6.0+)
                Keys: tenant_id, workflow_id, run_id, node_id

        Returns:
            List of anti-patterns
        """
        pass

    # ==================== UPDATE OPERATIONS ====================

    @abstractmethod
    def update_heuristic(
        self,
        heuristic_id: str,
        updates: Dict[str, Any],
    ) -> bool:
        """
        Update a heuristic's fields.

        Args:
            heuristic_id: ID of heuristic to update
            updates: Dict of field->value updates

        Returns:
            True if updated, False if not found
        """
        pass

    @abstractmethod
    def increment_heuristic_occurrence(
        self,
        heuristic_id: str,
        success: bool,
    ) -> bool:
        """
        Increment heuristic occurrence count.

        Args:
            heuristic_id: ID of heuristic
            success: Whether this occurrence was successful

        Returns:
            True if updated
        """
        pass

    @abstractmethod
    def update_heuristic_confidence(
        self,
        heuristic_id: str,
        new_confidence: float,
    ) -> bool:
        """
        Update a heuristic's confidence value.

        Args:
            heuristic_id: ID of heuristic to update
            new_confidence: New confidence value (0.0 - 1.0)

        Returns:
            True if updated, False if not found
        """
        pass

    @abstractmethod
    def update_knowledge_confidence(
        self,
        knowledge_id: str,
        new_confidence: float,
    ) -> bool:
        """
        Update domain knowledge confidence value.

        Args:
            knowledge_id: ID of knowledge to update
            new_confidence: New confidence value (0.0 - 1.0)

        Returns:
            True if updated, False if not found
        """
        pass

    # ==================== DELETE OPERATIONS ====================

    @abstractmethod
    def delete_heuristic(self, heuristic_id: str) -> bool:
        """
        Delete a heuristic by ID.

        Args:
            heuristic_id: ID of heuristic to delete

        Returns:
            True if deleted, False if not found
        """
        pass

    @abstractmethod
    def delete_outcome(self, outcome_id: str) -> bool:
        """
        Delete an outcome by ID.

        Args:
            outcome_id: ID of outcome to delete

        Returns:
            True if deleted, False if not found
        """
        pass

    @abstractmethod
    def delete_domain_knowledge(self, knowledge_id: str) -> bool:
        """
        Delete domain knowledge by ID.

        Args:
            knowledge_id: ID of knowledge to delete

        Returns:
            True if deleted, False if not found
        """
        pass

    @abstractmethod
    def delete_anti_pattern(self, anti_pattern_id: str) -> bool:
        """
        Delete an anti-pattern by ID.

        Args:
            anti_pattern_id: ID of anti-pattern to delete

        Returns:
            True if deleted, False if not found
        """
        pass

    @abstractmethod
    def delete_outcomes_older_than(
        self,
        project_id: str,
        older_than: datetime,
        agent: Optional[str] = None,
    ) -> int:
        """
        Delete old outcomes.

        Args:
            project_id: Project to prune
            older_than: Delete outcomes older than this
            agent: Optional agent filter

        Returns:
            Number of items deleted
        """
        pass

    @abstractmethod
    def delete_low_confidence_heuristics(
        self,
        project_id: str,
        below_confidence: float,
        agent: Optional[str] = None,
    ) -> int:
        """
        Delete low-confidence heuristics.

        Args:
            project_id: Project to prune
            below_confidence: Delete below this threshold
            agent: Optional agent filter

        Returns:
            Number of items deleted
        """
        pass

    # ==================== MULTI-AGENT MEMORY SHARING ====================

    def get_heuristics_for_agents(
        self,
        project_id: str,
        agents: List[str],
        embedding: Optional[List[float]] = None,
        top_k: int = 5,
        min_confidence: float = 0.0,
    ) -> List[Heuristic]:
        """
        Get heuristics from multiple agents in one call.

        This enables multi-agent memory sharing where an agent can
        read memories from agents it inherits from.

        Args:
            project_id: Project to query
            agents: List of agent names to query
            embedding: Query embedding for semantic search
            top_k: Max results to return per agent
            min_confidence: Minimum confidence threshold

        Returns:
            List of matching heuristics from all specified agents
        """
        # Default implementation: query each agent individually
        results = []
        for agent in agents:
            agent_heuristics = self.get_heuristics(
                project_id=project_id,
                agent=agent,
                embedding=embedding,
                top_k=top_k,
                min_confidence=min_confidence,
            )
            results.extend(agent_heuristics)
        return results

    def get_outcomes_for_agents(
        self,
        project_id: str,
        agents: List[str],
        task_type: Optional[str] = None,
        embedding: Optional[List[float]] = None,
        top_k: int = 5,
        success_only: bool = False,
    ) -> List[Outcome]:
        """
        Get outcomes from multiple agents in one call.

        Args:
            project_id: Project to query
            agents: List of agent names to query
            task_type: Filter by task type
            embedding: Query embedding for semantic search
            top_k: Max results to return per agent
            success_only: Only return successful outcomes

        Returns:
            List of matching outcomes from all specified agents
        """
        results = []
        for agent in agents:
            agent_outcomes = self.get_outcomes(
                project_id=project_id,
                agent=agent,
                task_type=task_type,
                embedding=embedding,
                top_k=top_k,
                success_only=success_only,
            )
            results.extend(agent_outcomes)
        return results

    def get_domain_knowledge_for_agents(
        self,
        project_id: str,
        agents: List[str],
        domain: Optional[str] = None,
        embedding: Optional[List[float]] = None,
        top_k: int = 5,
    ) -> List[DomainKnowledge]:
        """
        Get domain knowledge from multiple agents in one call.

        Args:
            project_id: Project to query
            agents: List of agent names to query
            domain: Filter by domain
            embedding: Query embedding for semantic search
            top_k: Max results to return per agent

        Returns:
            List of matching domain knowledge from all specified agents
        """
        results = []
        for agent in agents:
            agent_knowledge = self.get_domain_knowledge(
                project_id=project_id,
                agent=agent,
                domain=domain,
                embedding=embedding,
                top_k=top_k,
            )
            results.extend(agent_knowledge)
        return results

    def get_anti_patterns_for_agents(
        self,
        project_id: str,
        agents: List[str],
        embedding: Optional[List[float]] = None,
        top_k: int = 5,
    ) -> List[AntiPattern]:
        """
        Get anti-patterns from multiple agents in one call.

        Args:
            project_id: Project to query
            agents: List of agent names to query
            embedding: Query embedding for semantic search
            top_k: Max results to return per agent

        Returns:
            List of matching anti-patterns from all specified agents
        """
        results = []
        for agent in agents:
            agent_patterns = self.get_anti_patterns(
                project_id=project_id,
                agent=agent,
                embedding=embedding,
                top_k=top_k,
            )
            results.extend(agent_patterns)
        return results

    # ==================== STATS ====================

    @abstractmethod
    def get_stats(
        self,
        project_id: str,
        agent: Optional[str] = None,
    ) -> Dict[str, Any]:
        """
        Get memory statistics.

        Returns:
            Dict with counts per memory type, total size, etc.
        """
        pass

    # ==================== MIGRATION SUPPORT ====================

    def get_schema_version(self) -> Optional[str]:
        """
        Get the current schema version.

        Returns:
            Current schema version string, or None if not tracked
        """
        # Default implementation returns None (no version tracking)
        return None

    def get_migration_status(self) -> Dict[str, Any]:
        """
        Get migration status information.

        Returns:
            Dict with current version, pending migrations, etc.
        """
        return {
            "current_version": self.get_schema_version(),
            "target_version": None,
            "pending_count": 0,
            "pending_versions": [],
            "needs_migration": False,
            "migration_supported": False,
        }

    def migrate(
        self, target_version: Optional[str] = None, dry_run: bool = False
    ) -> List[str]:
        """
        Apply pending schema migrations.

        Args:
            target_version: Optional target version (applies all if not specified)
            dry_run: If True, show what would be done without making changes

        Returns:
            List of applied migration versions
        """
        # Default implementation does nothing
        return []

    def rollback(self, target_version: str, dry_run: bool = False) -> List[str]:
        """
        Roll back schema to a previous version.

        Args:
            target_version: Version to roll back to
            dry_run: If True, show what would be done without making changes

        Returns:
            List of rolled back migration versions
        """
        # Default implementation does nothing
        return []

    # ==================== CHECKPOINT OPERATIONS (v0.6.0+) ====================

    def save_checkpoint(self, checkpoint: "Checkpoint") -> str:
        """
        Save a workflow checkpoint.

        Args:
            checkpoint: Checkpoint to save

        Returns:
            The checkpoint ID

        Note: Default implementation raises NotImplementedError.
        Backends should override for workflow support.
        """
        raise NotImplementedError(
            f"{self.__class__.__name__} does not support checkpoints. "
            "Use SQLiteStorage or PostgreSQLStorage for workflow features."
        )

    def get_checkpoint(self, checkpoint_id: str) -> Optional["Checkpoint"]:
        """
        Get a checkpoint by ID.

        Args:
            checkpoint_id: The checkpoint ID

        Returns:
            The checkpoint, or None if not found
        """
        raise NotImplementedError(
            f"{self.__class__.__name__} does not support checkpoints."
        )

    def get_latest_checkpoint(
        self,
        run_id: str,
        branch_id: Optional[str] = None,
    ) -> Optional["Checkpoint"]:
        """
        Get the most recent checkpoint for a workflow run.

        Args:
            run_id: The workflow run identifier
            branch_id: Optional branch to filter by

        Returns:
            The latest checkpoint, or None if no checkpoints exist
        """
        raise NotImplementedError(
            f"{self.__class__.__name__} does not support checkpoints."
        )

    def get_checkpoints_for_run(
        self,
        run_id: str,
        branch_id: Optional[str] = None,
        limit: int = 100,
    ) -> List["Checkpoint"]:
        """
        Get all checkpoints for a workflow run.

        Args:
            run_id: The workflow run identifier
            branch_id: Optional branch filter
            limit: Maximum checkpoints to return

        Returns:
            List of checkpoints ordered by sequence number
        """
        raise NotImplementedError(
            f"{self.__class__.__name__} does not support checkpoints."
        )

    def cleanup_checkpoints(
        self,
        run_id: str,
        keep_latest: int = 1,
    ) -> int:
        """
        Clean up old checkpoints for a completed run.

        Args:
            run_id: The workflow run identifier
            keep_latest: Number of latest checkpoints to keep

        Returns:
            Number of checkpoints deleted
        """
        raise NotImplementedError(
            f"{self.__class__.__name__} does not support checkpoints."
        )

    # ==================== WORKFLOW OUTCOME OPERATIONS (v0.6.0+) ====================

    def save_workflow_outcome(self, outcome: "WorkflowOutcome") -> str:
        """
        Save a workflow outcome.

        Args:
            outcome: WorkflowOutcome to save

        Returns:
            The outcome ID
        """
        raise NotImplementedError(
            f"{self.__class__.__name__} does not support workflow outcomes."
        )

    def get_workflow_outcome(self, outcome_id: str) -> Optional["WorkflowOutcome"]:
        """
        Get a workflow outcome by ID.

        Args:
            outcome_id: The outcome ID

        Returns:
            The workflow outcome, or None if not found
        """
        raise NotImplementedError(
            f"{self.__class__.__name__} does not support workflow outcomes."
        )

    def get_workflow_outcomes(
        self,
        project_id: str,
        agent: Optional[str] = None,
        workflow_id: Optional[str] = None,
        embedding: Optional[List[float]] = None,
        top_k: int = 10,
        scope_filter: Optional[Dict[str, Any]] = None,
    ) -> List["WorkflowOutcome"]:
        """
        Get workflow outcomes with optional filtering.

        Args:
            project_id: Project to query
            agent: Filter by agent
            workflow_id: Filter by workflow definition
            embedding: Query embedding for semantic search
            top_k: Max results
            scope_filter: Optional workflow scope filter

        Returns:
            List of matching workflow outcomes
        """
        raise NotImplementedError(
            f"{self.__class__.__name__} does not support workflow outcomes."
        )

    # ==================== ARTIFACT LINK OPERATIONS (v0.6.0+) ====================

    def save_artifact_link(self, artifact_ref: "ArtifactRef") -> str:
        """
        Save an artifact reference linked to a memory.

        Args:
            artifact_ref: ArtifactRef to save

        Returns:
            The artifact reference ID
        """
        raise NotImplementedError(
            f"{self.__class__.__name__} does not support artifact links."
        )

    def get_artifact_links(
        self,
        memory_id: str,
    ) -> List["ArtifactRef"]:
        """
        Get all artifact references linked to a memory.

        Args:
            memory_id: The memory ID to get artifacts for

        Returns:
            List of artifact references
        """
        raise NotImplementedError(
            f"{self.__class__.__name__} does not support artifact links."
        )

    def delete_artifact_link(self, artifact_id: str) -> bool:
        """
        Delete an artifact reference.

        Args:
            artifact_id: The artifact reference ID

        Returns:
            True if deleted, False if not found
        """
        raise NotImplementedError(
            f"{self.__class__.__name__} does not support artifact links."
        )

    # ==================== SESSION HANDOFFS ====================

    def save_session_handoff(self, handoff: "SessionHandoff") -> str:
        """
        Save a session handoff for persistence across restarts.

        Args:
            handoff: SessionHandoff to save

        Returns:
            The handoff ID

        Note: Default implementation raises NotImplementedError.
        """
        raise NotImplementedError(
            f"{self.__class__.__name__} does not support session handoffs."
        )

    def get_session_handoffs(
        self,
        project_id: str,
        agent: str,
        limit: int = 50,
    ) -> List["SessionHandoff"]:
        """
        Get session handoffs for an agent, most recent first.

        Args:
            project_id: Project identifier
            agent: Agent identifier
            limit: Maximum number of handoffs to return

        Returns:
            List of SessionHandoff, most recent first
        """
        raise NotImplementedError(
            f"{self.__class__.__name__} does not support session handoffs."
        )

    def get_latest_session_handoff(
        self,
        project_id: str,
        agent: str,
    ) -> Optional["SessionHandoff"]:
        """
        Get the most recent session handoff for an agent.

        Args:
            project_id: Project identifier
            agent: Agent identifier

        Returns:
            Most recent SessionHandoff or None
        """
        handoffs = self.get_session_handoffs(project_id, agent, limit=1)
        return handoffs[0] if handoffs else None

    def delete_session_handoffs(
        self,
        project_id: str,
        agent: Optional[str] = None,
    ) -> int:
        """
        Delete session handoffs.

        Args:
            project_id: Project identifier
            agent: If provided, only delete for this agent

        Returns:
            Number of handoffs deleted
        """
        raise NotImplementedError(
            f"{self.__class__.__name__} does not support session handoffs."
        )

    # ==================== MEMORY STRENGTH OPERATIONS (v0.7.0+) ====================

    def save_memory_strength(self, strength: Any) -> str:
        """
        Save or update a memory strength record.

        Args:
            strength: MemoryStrength instance to save

        Returns:
            The memory ID

        Note: Default implementation raises NotImplementedError.
        Backends should override for decay-based forgetting support.
        """
        raise NotImplementedError(
            f"{self.__class__.__name__} does not support memory strength tracking. "
            "Use SQLiteStorage for decay-based forgetting features."
        )

    def get_memory_strength(self, memory_id: str) -> Optional[Any]:
        """
        Get a memory strength record by memory ID.

        Args:
            memory_id: The memory ID to look up

        Returns:
            MemoryStrength instance, or None if not found
        """
        raise NotImplementedError(
            f"{self.__class__.__name__} does not support memory strength tracking."
        )

    def get_all_memory_strengths(
        self,
        project_id: str,
        agent: Optional[str] = None,
    ) -> List[Any]:
        """
        Get all memory strength records for a project/agent.

        Args:
            project_id: Project to query
            agent: Optional agent filter

        Returns:
            List of MemoryStrength instances
        """
        raise NotImplementedError(
            f"{self.__class__.__name__} does not support memory strength tracking."
        )

    def delete_memory_strength(self, memory_id: str) -> bool:
        """
        Delete a memory strength record.

        Args:
            memory_id: The memory ID

        Returns:
            True if deleted, False if not found
        """
        raise NotImplementedError(
            f"{self.__class__.__name__} does not support memory strength tracking."
        )

    # ==================== ARCHIVE OPERATIONS (v0.7.0+) ====================

    def archive_memory(
        self,
        memory_id: str,
        memory_type: str,
        reason: str,
        final_strength: float,
    ) -> Any:
        """
        Archive a memory before deletion.

        Captures full memory data including content, embedding, and metadata
        for potential future recovery or compliance auditing.

        Args:
            memory_id: ID of the memory to archive
            memory_type: Type of memory (heuristic, outcome, etc.)
            reason: Why being archived (decay, manual, consolidation, etc.)
            final_strength: Memory strength at time of archival

        Returns:
            ArchivedMemory instance

        Note: Default implementation raises NotImplementedError.
        Backends should override for archive support.
        """
        raise NotImplementedError(
            f"{self.__class__.__name__} does not support memory archiving. "
            "Use SQLiteStorage for archive features."
        )

    def get_archive(self, archive_id: str) -> Optional[Any]:
        """
        Get an archived memory by its archive ID.

        Args:
            archive_id: The archive ID

        Returns:
            ArchivedMemory instance, or None if not found
        """
        raise NotImplementedError(
            f"{self.__class__.__name__} does not support memory archiving."
        )

    def list_archives(
        self,
        project_id: str,
        agent: Optional[str] = None,
        reason: Optional[str] = None,
        memory_type: Optional[str] = None,
        older_than: Optional[datetime] = None,
        younger_than: Optional[datetime] = None,
        include_restored: bool = False,
        limit: int = 100,
    ) -> List[Any]:
        """
        List archived memories with filtering.

        Args:
            project_id: Project to query
            agent: Optional agent filter
            reason: Optional archive reason filter
            memory_type: Optional memory type filter
            older_than: Optional filter for archives older than this time
            younger_than: Optional filter for archives younger than this time
            include_restored: Whether to include archives that have been restored
            limit: Maximum number of archives to return

        Returns:
            List of ArchivedMemory instances
        """
        raise NotImplementedError(
            f"{self.__class__.__name__} does not support memory archiving."
        )

    def restore_from_archive(self, archive_id: str) -> str:
        """
        Restore an archived memory, creating a new memory from archive data.

        The original archive is marked as restored but retained for audit purposes.

        Args:
            archive_id: The archive ID to restore

        Returns:
            New memory ID of the restored memory

        Raises:
            ValueError: If archive not found or already restored
        """
        raise NotImplementedError(
            f"{self.__class__.__name__} does not support memory archiving."
        )

    def purge_archives(
        self,
        older_than: datetime,
        project_id: Optional[str] = None,
        reason: Optional[str] = None,
    ) -> int:
        """
        Permanently delete archived memories.

        This is a destructive operation - archives cannot be recovered after purging.

        Args:
            older_than: Delete archives older than this datetime
            project_id: Optional project filter
            reason: Optional reason filter

        Returns:
            Number of archives permanently deleted
        """
        raise NotImplementedError(
            f"{self.__class__.__name__} does not support memory archiving."
        )

    def get_archive_stats(
        self,
        project_id: str,
        agent: Optional[str] = None,
    ) -> Dict[str, Any]:
        """
        Get statistics about archived memories.

        Args:
            project_id: Project to query
            agent: Optional agent filter

        Returns:
            Dict with archive statistics (counts, by reason, by type, etc.)
        """
        raise NotImplementedError(
            f"{self.__class__.__name__} does not support memory archiving."
        )

    # ==================== UTILITY ====================

    @classmethod
    @abstractmethod
    def from_config(cls, config: Dict[str, Any]) -> "StorageBackend":
        """
        Create instance from configuration dict.

        Args:
            config: Configuration dictionary

        Returns:
            Configured storage backend instance
        """
        pass