alma-memory 0.5.0-py3-none-any.whl → 0.7.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- alma/__init__.py +296 -194
- alma/compression/__init__.py +33 -0
- alma/compression/pipeline.py +980 -0
- alma/confidence/__init__.py +47 -47
- alma/confidence/engine.py +540 -540
- alma/confidence/types.py +351 -351
- alma/config/loader.py +157 -157
- alma/consolidation/__init__.py +23 -23
- alma/consolidation/engine.py +678 -678
- alma/consolidation/prompts.py +84 -84
- alma/core.py +1189 -322
- alma/domains/__init__.py +30 -30
- alma/domains/factory.py +359 -359
- alma/domains/schemas.py +448 -448
- alma/domains/types.py +272 -272
- alma/events/__init__.py +75 -75
- alma/events/emitter.py +285 -284
- alma/events/storage_mixin.py +246 -246
- alma/events/types.py +126 -126
- alma/events/webhook.py +425 -425
- alma/exceptions.py +49 -49
- alma/extraction/__init__.py +31 -31
- alma/extraction/auto_learner.py +265 -264
- alma/extraction/extractor.py +420 -420
- alma/graph/__init__.py +106 -81
- alma/graph/backends/__init__.py +32 -18
- alma/graph/backends/kuzu.py +624 -0
- alma/graph/backends/memgraph.py +432 -0
- alma/graph/backends/memory.py +236 -236
- alma/graph/backends/neo4j.py +417 -417
- alma/graph/base.py +159 -159
- alma/graph/extraction.py +198 -198
- alma/graph/store.py +860 -860
- alma/harness/__init__.py +35 -35
- alma/harness/base.py +386 -386
- alma/harness/domains.py +705 -705
- alma/initializer/__init__.py +37 -37
- alma/initializer/initializer.py +418 -418
- alma/initializer/types.py +250 -250
- alma/integration/__init__.py +62 -62
- alma/integration/claude_agents.py +444 -432
- alma/integration/helena.py +423 -423
- alma/integration/victor.py +471 -471
- alma/learning/__init__.py +101 -86
- alma/learning/decay.py +878 -0
- alma/learning/forgetting.py +1446 -1446
- alma/learning/heuristic_extractor.py +390 -390
- alma/learning/protocols.py +374 -374
- alma/learning/validation.py +346 -346
- alma/mcp/__init__.py +123 -45
- alma/mcp/__main__.py +156 -156
- alma/mcp/resources.py +122 -122
- alma/mcp/server.py +955 -591
- alma/mcp/tools.py +3254 -511
- alma/observability/__init__.py +91 -0
- alma/observability/config.py +302 -0
- alma/observability/guidelines.py +170 -0
- alma/observability/logging.py +424 -0
- alma/observability/metrics.py +583 -0
- alma/observability/tracing.py +440 -0
- alma/progress/__init__.py +21 -21
- alma/progress/tracker.py +607 -607
- alma/progress/types.py +250 -250
- alma/retrieval/__init__.py +134 -53
- alma/retrieval/budget.py +525 -0
- alma/retrieval/cache.py +1304 -1061
- alma/retrieval/embeddings.py +202 -202
- alma/retrieval/engine.py +850 -366
- alma/retrieval/modes.py +365 -0
- alma/retrieval/progressive.py +560 -0
- alma/retrieval/scoring.py +344 -344
- alma/retrieval/trust_scoring.py +637 -0
- alma/retrieval/verification.py +797 -0
- alma/session/__init__.py +19 -19
- alma/session/manager.py +442 -399
- alma/session/types.py +288 -288
- alma/storage/__init__.py +101 -61
- alma/storage/archive.py +233 -0
- alma/storage/azure_cosmos.py +1259 -1048
- alma/storage/base.py +1083 -525
- alma/storage/chroma.py +1443 -1443
- alma/storage/constants.py +103 -0
- alma/storage/file_based.py +614 -619
- alma/storage/migrations/__init__.py +21 -0
- alma/storage/migrations/base.py +321 -0
- alma/storage/migrations/runner.py +323 -0
- alma/storage/migrations/version_stores.py +337 -0
- alma/storage/migrations/versions/__init__.py +11 -0
- alma/storage/migrations/versions/v1_0_0.py +373 -0
- alma/storage/migrations/versions/v1_1_0_workflow_context.py +551 -0
- alma/storage/pinecone.py +1080 -1080
- alma/storage/postgresql.py +1948 -1452
- alma/storage/qdrant.py +1306 -1306
- alma/storage/sqlite_local.py +3041 -1358
- alma/testing/__init__.py +46 -0
- alma/testing/factories.py +301 -0
- alma/testing/mocks.py +389 -0
- alma/types.py +292 -264
- alma/utils/__init__.py +19 -0
- alma/utils/tokenizer.py +521 -0
- alma/workflow/__init__.py +83 -0
- alma/workflow/artifacts.py +170 -0
- alma/workflow/checkpoint.py +311 -0
- alma/workflow/context.py +228 -0
- alma/workflow/outcomes.py +189 -0
- alma/workflow/reducers.py +393 -0
- {alma_memory-0.5.0.dist-info → alma_memory-0.7.0.dist-info}/METADATA +244 -72
- alma_memory-0.7.0.dist-info/RECORD +112 -0
- alma_memory-0.5.0.dist-info/RECORD +0 -76
- {alma_memory-0.5.0.dist-info → alma_memory-0.7.0.dist-info}/WHEEL +0 -0
- {alma_memory-0.5.0.dist-info → alma_memory-0.7.0.dist-info}/top_level.txt +0 -0
alma/storage/file_based.py
CHANGED
@@ -1,619 +1,614 @@
"""
ALMA File-Based Storage Backend.

Simple JSON file storage for testing and fallback scenarios.
No vector search - uses basic text matching for retrieval.
"""

import json
import logging
from datetime import datetime, timezone
from pathlib import Path
from typing import Any, Dict, List, Optional

from alma.storage.base import StorageBackend
from alma.storage.constants import MemoryType
from alma.types import (
    AntiPattern,
    DomainKnowledge,
    Heuristic,
    Outcome,
    UserPreference,
)

logger = logging.getLogger(__name__)


class FileBasedStorage(StorageBackend):
    """
    File-based storage using JSON files.

    Structure:
    .alma/
    ├── heuristics.json
    ├── outcomes.json
    ├── preferences.json
    ├── domain_knowledge.json
    └── anti_patterns.json

    Note: This backend does NOT support vector search.
    Use SQLiteStorage or AzureCosmosStorage for semantic retrieval.
    """

    def __init__(self, storage_dir: Path):
        """
        Initialize file-based storage.

        Args:
            storage_dir: Directory to store JSON files
        """
        self.storage_dir = Path(storage_dir)
        self.storage_dir.mkdir(parents=True, exist_ok=True)

        # File paths (using canonical memory type names)
        self._files = {mt: self.storage_dir / f"{mt}.json" for mt in MemoryType.ALL}

        # Initialize empty files if they don't exist
        for file_path in self._files.values():
            if not file_path.exists():
                self._write_json(file_path, [])

    @classmethod
    def from_config(cls, config: Dict[str, Any]) -> "FileBasedStorage":
        """Create instance from configuration."""
        storage_dir = config.get("storage_dir", ".alma")
        return cls(storage_dir=Path(storage_dir))

    # ==================== WRITE OPERATIONS ====================

    def save_heuristic(self, heuristic: Heuristic) -> str:
        """Save a heuristic (UPSERT - update if exists, insert if new)."""
        data = self._read_json(self._files["heuristics"])
        record = self._to_dict(heuristic)
        # Find and replace existing, or append new
        found = False
        for i, existing in enumerate(data):
            if existing.get("id") == record["id"]:
                data[i] = record
                found = True
                break
        if not found:
            data.append(record)
        self._write_json(self._files["heuristics"], data)
        logger.debug(f"Saved heuristic: {heuristic.id}")
        return heuristic.id

    def save_outcome(self, outcome: Outcome) -> str:
        """Save an outcome (UPSERT - update if exists, insert if new)."""
        data = self._read_json(self._files["outcomes"])
        record = self._to_dict(outcome)
        # Find and replace existing, or append new
        found = False
        for i, existing in enumerate(data):
            if existing.get("id") == record["id"]:
                data[i] = record
                found = True
                break
        if not found:
            data.append(record)
        self._write_json(self._files["outcomes"], data)
        logger.debug(f"Saved outcome: {outcome.id}")
        return outcome.id

    def save_user_preference(self, preference: UserPreference) -> str:
        """Save a user preference (UPSERT - update if exists, insert if new)."""
        data = self._read_json(self._files["preferences"])
        record = self._to_dict(preference)
        # Find and replace existing, or append new
        found = False
        for i, existing in enumerate(data):
            if existing.get("id") == record["id"]:
                data[i] = record
                found = True
                break
        if not found:
            data.append(record)
        self._write_json(self._files["preferences"], data)
        logger.debug(f"Saved preference: {preference.id}")
        return preference.id

    def save_domain_knowledge(self, knowledge: DomainKnowledge) -> str:
        """Save domain knowledge (UPSERT - update if exists, insert if new)."""
        data = self._read_json(self._files["domain_knowledge"])
        record = self._to_dict(knowledge)
        # Find and replace existing, or append new
        found = False
        for i, existing in enumerate(data):
            if existing.get("id") == record["id"]:
                data[i] = record
                found = True
                break
        if not found:
            data.append(record)
        self._write_json(self._files["domain_knowledge"], data)
        logger.debug(f"Saved domain knowledge: {knowledge.id}")
        return knowledge.id

    def save_anti_pattern(self, anti_pattern: AntiPattern) -> str:
        """Save an anti-pattern (UPSERT - update if exists, insert if new)."""
        data = self._read_json(self._files["anti_patterns"])
        record = self._to_dict(anti_pattern)
        # Find and replace existing, or append new
        found = False
        for i, existing in enumerate(data):
            if existing.get("id") == record["id"]:
                data[i] = record
                found = True
                break
        if not found:
            data.append(record)
        self._write_json(self._files["anti_patterns"], data)
        logger.debug(f"Saved anti-pattern: {anti_pattern.id}")
        return anti_pattern.id

    # ==================== READ OPERATIONS ====================

    def get_heuristics(
        self,
        project_id: str,
        agent: Optional[str] = None,
        embedding: Optional[List[float]] = None,
        top_k: int = 5,
        min_confidence: float = 0.0,
    ) -> List[Heuristic]:
        """Get heuristics (no vector search - returns all matching filters)."""
        data = self._read_json(self._files["heuristics"])

        # Filter
        results = []
        for record in data:
            if record.get("project_id") != project_id:
                continue
            if agent and record.get("agent") != agent:
                continue
            if record.get("confidence", 0) < min_confidence:
                continue
            results.append(self._to_heuristic(record))

        # Sort by confidence and return top_k
        results.sort(key=lambda x: -x.confidence)
        return results[:top_k]

    def get_outcomes(
        self,
        project_id: str,
        agent: Optional[str] = None,
        task_type: Optional[str] = None,
        embedding: Optional[List[float]] = None,
        top_k: int = 5,
        success_only: bool = False,
    ) -> List[Outcome]:
        """Get outcomes (no vector search)."""
        data = self._read_json(self._files["outcomes"])

        results = []
        for record in data:
            if record.get("project_id") != project_id:
                continue
            if agent and record.get("agent") != agent:
                continue
            if task_type and record.get("task_type") != task_type:
                continue
            if success_only and not record.get("success"):
                continue
            results.append(self._to_outcome(record))

        # Sort by timestamp (most recent first) and return top_k
        results.sort(key=lambda x: x.timestamp, reverse=True)
        return results[:top_k]

    def get_user_preferences(
        self,
        user_id: str,
        category: Optional[str] = None,
    ) -> List[UserPreference]:
        """Get user preferences."""
        data = self._read_json(self._files["preferences"])

        results = []
        for record in data:
            if record.get("user_id") != user_id:
                continue
            if category and record.get("category") != category:
                continue
            results.append(self._to_user_preference(record))

        return results

    def get_domain_knowledge(
        self,
        project_id: str,
        agent: Optional[str] = None,
        domain: Optional[str] = None,
        embedding: Optional[List[float]] = None,
        top_k: int = 5,
    ) -> List[DomainKnowledge]:
        """Get domain knowledge (no vector search)."""
        data = self._read_json(self._files["domain_knowledge"])

        results = []
        for record in data:
            if record.get("project_id") != project_id:
                continue
            if agent and record.get("agent") != agent:
                continue
            if domain and record.get("domain") != domain:
                continue
            results.append(self._to_domain_knowledge(record))

        # Sort by confidence and return top_k
        results.sort(key=lambda x: -x.confidence)
        return results[:top_k]

    def get_anti_patterns(
        self,
        project_id: str,
        agent: Optional[str] = None,
        embedding: Optional[List[float]] = None,
        top_k: int = 5,
    ) -> List[AntiPattern]:
        """Get anti-patterns (no vector search)."""
        data = self._read_json(self._files["anti_patterns"])

        results = []
        for record in data:
            if record.get("project_id") != project_id:
                continue
            if agent and record.get("agent") != agent:
                continue
            results.append(self._to_anti_pattern(record))

        # Sort by occurrence count and return top_k
        results.sort(key=lambda x: -x.occurrence_count)
        return results[:top_k]

    # ==================== UPDATE OPERATIONS ====================

    def update_heuristic(
        self,
        heuristic_id: str,
        updates: Dict[str, Any],
    ) -> bool:
        """Update a heuristic's fields."""
        data = self._read_json(self._files["heuristics"])

        for i, record in enumerate(data):
            if record.get("id") == heuristic_id:
                data[i].update(updates)
                self._write_json(self._files["heuristics"], data)
                return True

        return False

    def increment_heuristic_occurrence(
        self,
        heuristic_id: str,
        success: bool,
    ) -> bool:
        """Increment heuristic occurrence count."""
        data = self._read_json(self._files["heuristics"])

        for i, record in enumerate(data):
            if record.get("id") == heuristic_id:
                data[i]["occurrence_count"] = record.get("occurrence_count", 0) + 1
                if success:
                    data[i]["success_count"] = record.get("success_count", 0) + 1
                data[i]["last_validated"] = datetime.now(timezone.utc).isoformat()
                self._write_json(self._files["heuristics"], data)
                return True

        return False

    # ==================== UPDATE CONFIDENCE OPERATIONS ====================

    def update_heuristic_confidence(
        self,
        heuristic_id: str,
        new_confidence: float,
    ) -> bool:
        """Update a heuristic's confidence score."""
        data = self._read_json(self._files["heuristics"])

        for i, record in enumerate(data):
            if record.get("id") == heuristic_id:
                data[i]["confidence"] = new_confidence
                data[i]["last_validated"] = datetime.now(timezone.utc).isoformat()
                self._write_json(self._files["heuristics"], data)
                return True

        return False

    def update_knowledge_confidence(
        self,
        knowledge_id: str,
        new_confidence: float,
    ) -> bool:
        """Update domain knowledge confidence score."""
        data = self._read_json(self._files["domain_knowledge"])

        for i, record in enumerate(data):
            if record.get("id") == knowledge_id:
                data[i]["confidence"] = new_confidence
                data[i]["last_verified"] = datetime.now(timezone.utc).isoformat()
                self._write_json(self._files["domain_knowledge"], data)
                return True

        return False

    # ==================== DELETE OPERATIONS ====================

    def delete_heuristic(self, heuristic_id: str) -> bool:
        """Delete a single heuristic by ID."""
        data = self._read_json(self._files["heuristics"])
        original_count = len(data)

        filtered = [r for r in data if r.get("id") != heuristic_id]
        self._write_json(self._files["heuristics"], filtered)

        deleted = original_count != len(filtered)
        if deleted:
            logger.debug(f"Deleted heuristic: {heuristic_id}")
        return deleted

    def delete_outcome(self, outcome_id: str) -> bool:
        """Delete a single outcome by ID."""
        data = self._read_json(self._files["outcomes"])
        original_count = len(data)

        filtered = [r for r in data if r.get("id") != outcome_id]
        self._write_json(self._files["outcomes"], filtered)

        deleted = original_count != len(filtered)
        if deleted:
            logger.debug(f"Deleted outcome: {outcome_id}")
        return deleted

    def delete_domain_knowledge(self, knowledge_id: str) -> bool:
        """Delete a single domain knowledge entry by ID."""
        data = self._read_json(self._files["domain_knowledge"])
        original_count = len(data)

        filtered = [r for r in data if r.get("id") != knowledge_id]
        self._write_json(self._files["domain_knowledge"], filtered)

        deleted = original_count != len(filtered)
        if deleted:
            logger.debug(f"Deleted domain knowledge: {knowledge_id}")
        return deleted

    def delete_anti_pattern(self, anti_pattern_id: str) -> bool:
        """Delete a single anti-pattern by ID."""
        data = self._read_json(self._files["anti_patterns"])
        original_count = len(data)

        filtered = [r for r in data if r.get("id") != anti_pattern_id]
        self._write_json(self._files["anti_patterns"], filtered)

        deleted = original_count != len(filtered)
        if deleted:
            logger.debug(f"Deleted anti-pattern: {anti_pattern_id}")
        return deleted

    def delete_outcomes_older_than(
        self,
        project_id: str,
        older_than: datetime,
        agent: Optional[str] = None,
    ) -> int:
        """Delete old outcomes."""
        data = self._read_json(self._files["outcomes"])
        original_count = len(data)

        filtered = []
        for record in data:
            if record.get("project_id") != project_id:
                filtered.append(record)
                continue
            if agent and record.get("agent") != agent:
                filtered.append(record)
                continue

            timestamp = self._parse_datetime(record.get("timestamp"))
            if timestamp and timestamp >= older_than:
                filtered.append(record)

        self._write_json(self._files["outcomes"], filtered)
        deleted = original_count - len(filtered)
        logger.info(f"Deleted {deleted} old outcomes")
        return deleted

    def delete_low_confidence_heuristics(
        self,
        project_id: str,
        below_confidence: float,
        agent: Optional[str] = None,
    ) -> int:
        """Delete low-confidence heuristics."""
        data = self._read_json(self._files["heuristics"])
        original_count = len(data)

        filtered = []
        for record in data:
            if record.get("project_id") != project_id:
                filtered.append(record)
                continue
            if agent and record.get("agent") != agent:
                filtered.append(record)
                continue

            if record.get("confidence", 0) >= below_confidence:
                filtered.append(record)

        self._write_json(self._files["heuristics"], filtered)
        deleted = original_count - len(filtered)
        logger.info(f"Deleted {deleted} low-confidence heuristics")
        return deleted

    # ==================== STATS ====================

    def get_stats(
        self,
        project_id: str,
        agent: Optional[str] = None,
    ) -> Dict[str, Any]:
        """Get memory statistics."""
        stats = {
            "project_id": project_id,
            "agent": agent,
            "heuristics_count": 0,
            "outcomes_count": 0,
            "preferences_count": 0,
            "domain_knowledge_count": 0,
            "anti_patterns_count": 0,
        }

        for name, file_path in self._files.items():
            data = self._read_json(file_path)
            count = 0
            for record in data:
                if name == "preferences":
                    # Preferences don't have project_id
                    count += 1
                elif record.get("project_id") == project_id:
                    if agent is None or record.get("agent") == agent:
                        count += 1
            stats[f"{name}_count"] = count

        stats["total_count"] = sum(stats[k] for k in stats if k.endswith("_count"))

        return stats

    # ==================== HELPERS ====================

    def _read_json(self, file_path: Path) -> List[Dict]:
        """Read JSON file."""
        try:
            with open(file_path, "r") as f:
                return json.load(f)
        except (json.JSONDecodeError, FileNotFoundError):
            return []

    def _write_json(self, file_path: Path, data: List[Dict]):
        """Write JSON file."""
        with open(file_path, "w") as f:
            json.dump(data, f, indent=2, default=str)

    def _to_dict(self, obj: Any) -> Dict:
        """Convert dataclass to dict with datetime handling."""
        if hasattr(obj, "__dataclass_fields__"):
            result = {}
            for field_name in obj.__dataclass_fields__:
                value = getattr(obj, field_name)
                if isinstance(value, datetime):
                    result[field_name] = value.isoformat()
                elif value is not None:
                    result[field_name] = value
            return result
        return dict(obj)

    def _parse_datetime(self, value: Any) -> Optional[datetime]:
        """Parse datetime from string or return as-is."""
        if value is None:
            return None
        if isinstance(value, datetime):
            return value
        try:
            return datetime.fromisoformat(value.replace("Z", "+00:00"))
        except (ValueError, AttributeError):
            return None

    def _to_heuristic(self, record: Dict) -> Heuristic:
        """Convert dict to Heuristic."""
        return Heuristic(
            id=record["id"],
            agent=record["agent"],
            project_id=record["project_id"],
            condition=record["condition"],
            strategy=record["strategy"],
            confidence=record.get("confidence", 0.0),
            occurrence_count=record.get("occurrence_count", 0),
            success_count=record.get("success_count", 0),
            last_validated=self._parse_datetime(record.get("last_validated"))
            or datetime.now(timezone.utc),
            created_at=self._parse_datetime(record.get("created_at"))
            or datetime.now(timezone.utc),
            embedding=record.get("embedding"),
            metadata=record.get("metadata", {}),
        )

    def _to_outcome(self, record: Dict) -> Outcome:
        """Convert dict to Outcome."""
        return Outcome(
            id=record["id"],
            agent=record["agent"],
            project_id=record["project_id"],
            task_type=record.get("task_type", "general"),
            task_description=record["task_description"],
            success=record.get("success", False),
            strategy_used=record.get("strategy_used", ""),
            duration_ms=record.get("duration_ms"),
            error_message=record.get("error_message"),
            user_feedback=record.get("user_feedback"),
            timestamp=self._parse_datetime(record.get("timestamp"))
            or datetime.now(timezone.utc),
            embedding=record.get("embedding"),
            metadata=record.get("metadata", {}),
        )

    def _to_user_preference(self, record: Dict) -> UserPreference:
        """Convert dict to UserPreference."""
        return UserPreference(
            id=record["id"],
            user_id=record["user_id"],
            category=record.get("category", "general"),
            preference=record["preference"],
            source=record.get("source", "unknown"),
            confidence=record.get("confidence", 1.0),
            timestamp=self._parse_datetime(record.get("timestamp"))
            or datetime.now(timezone.utc),
            metadata=record.get("metadata", {}),
        )

    def _to_domain_knowledge(self, record: Dict) -> DomainKnowledge:
        """Convert dict to DomainKnowledge."""
        return DomainKnowledge(
            id=record["id"],
            agent=record["agent"],
            project_id=record["project_id"],
            domain=record.get("domain", "general"),
            fact=record["fact"],
            source=record.get("source", "unknown"),
            confidence=record.get("confidence", 1.0),
            last_verified=self._parse_datetime(record.get("last_verified"))
            or datetime.now(timezone.utc),
            embedding=record.get("embedding"),
            metadata=record.get("metadata", {}),
        )

    def _to_anti_pattern(self, record: Dict) -> AntiPattern:
        """Convert dict to AntiPattern."""
        return AntiPattern(
            id=record["id"],
            agent=record["agent"],
            project_id=record["project_id"],
            pattern=record["pattern"],
            why_bad=record.get("why_bad", ""),
            better_alternative=record.get("better_alternative", ""),
            occurrence_count=record.get("occurrence_count", 1),
            last_seen=self._parse_datetime(record.get("last_seen"))
            or datetime.now(timezone.utc),
            created_at=self._parse_datetime(record.get("created_at"))
            or datetime.now(timezone.utc),
            embedding=record.get("embedding"),
            metadata=record.get("metadata", {}),
        )