alma-memory 0.5.1__py3-none-any.whl → 0.7.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- alma/__init__.py +296 -226
- alma/compression/__init__.py +33 -0
- alma/compression/pipeline.py +980 -0
- alma/confidence/__init__.py +47 -47
- alma/confidence/engine.py +540 -540
- alma/confidence/types.py +351 -351
- alma/config/loader.py +157 -157
- alma/consolidation/__init__.py +23 -23
- alma/consolidation/engine.py +678 -678
- alma/consolidation/prompts.py +84 -84
- alma/core.py +1189 -430
- alma/domains/__init__.py +30 -30
- alma/domains/factory.py +359 -359
- alma/domains/schemas.py +448 -448
- alma/domains/types.py +272 -272
- alma/events/__init__.py +75 -75
- alma/events/emitter.py +285 -284
- alma/events/storage_mixin.py +246 -246
- alma/events/types.py +126 -126
- alma/events/webhook.py +425 -425
- alma/exceptions.py +49 -49
- alma/extraction/__init__.py +31 -31
- alma/extraction/auto_learner.py +265 -265
- alma/extraction/extractor.py +420 -420
- alma/graph/__init__.py +106 -106
- alma/graph/backends/__init__.py +32 -32
- alma/graph/backends/kuzu.py +624 -624
- alma/graph/backends/memgraph.py +432 -432
- alma/graph/backends/memory.py +236 -236
- alma/graph/backends/neo4j.py +417 -417
- alma/graph/base.py +159 -159
- alma/graph/extraction.py +198 -198
- alma/graph/store.py +860 -860
- alma/harness/__init__.py +35 -35
- alma/harness/base.py +386 -386
- alma/harness/domains.py +705 -705
- alma/initializer/__init__.py +37 -37
- alma/initializer/initializer.py +418 -418
- alma/initializer/types.py +250 -250
- alma/integration/__init__.py +62 -62
- alma/integration/claude_agents.py +444 -444
- alma/integration/helena.py +423 -423
- alma/integration/victor.py +471 -471
- alma/learning/__init__.py +101 -86
- alma/learning/decay.py +878 -0
- alma/learning/forgetting.py +1446 -1446
- alma/learning/heuristic_extractor.py +390 -390
- alma/learning/protocols.py +374 -374
- alma/learning/validation.py +346 -346
- alma/mcp/__init__.py +123 -45
- alma/mcp/__main__.py +156 -156
- alma/mcp/resources.py +122 -122
- alma/mcp/server.py +955 -591
- alma/mcp/tools.py +3254 -509
- alma/observability/__init__.py +91 -84
- alma/observability/config.py +302 -302
- alma/observability/guidelines.py +170 -0
- alma/observability/logging.py +424 -424
- alma/observability/metrics.py +583 -583
- alma/observability/tracing.py +440 -440
- alma/progress/__init__.py +21 -21
- alma/progress/tracker.py +607 -607
- alma/progress/types.py +250 -250
- alma/retrieval/__init__.py +134 -53
- alma/retrieval/budget.py +525 -0
- alma/retrieval/cache.py +1304 -1061
- alma/retrieval/embeddings.py +202 -202
- alma/retrieval/engine.py +850 -427
- alma/retrieval/modes.py +365 -0
- alma/retrieval/progressive.py +560 -0
- alma/retrieval/scoring.py +344 -344
- alma/retrieval/trust_scoring.py +637 -0
- alma/retrieval/verification.py +797 -0
- alma/session/__init__.py +19 -19
- alma/session/manager.py +442 -399
- alma/session/types.py +288 -288
- alma/storage/__init__.py +101 -90
- alma/storage/archive.py +233 -0
- alma/storage/azure_cosmos.py +1259 -1259
- alma/storage/base.py +1083 -583
- alma/storage/chroma.py +1443 -1443
- alma/storage/constants.py +103 -103
- alma/storage/file_based.py +614 -614
- alma/storage/migrations/__init__.py +21 -21
- alma/storage/migrations/base.py +321 -321
- alma/storage/migrations/runner.py +323 -323
- alma/storage/migrations/version_stores.py +337 -337
- alma/storage/migrations/versions/__init__.py +11 -11
- alma/storage/migrations/versions/v1_0_0.py +373 -373
- alma/storage/migrations/versions/v1_1_0_workflow_context.py +551 -0
- alma/storage/pinecone.py +1080 -1080
- alma/storage/postgresql.py +1948 -1559
- alma/storage/qdrant.py +1306 -1306
- alma/storage/sqlite_local.py +3041 -1457
- alma/testing/__init__.py +46 -46
- alma/testing/factories.py +301 -301
- alma/testing/mocks.py +389 -389
- alma/types.py +292 -264
- alma/utils/__init__.py +19 -0
- alma/utils/tokenizer.py +521 -0
- alma/workflow/__init__.py +83 -0
- alma/workflow/artifacts.py +170 -0
- alma/workflow/checkpoint.py +311 -0
- alma/workflow/context.py +228 -0
- alma/workflow/outcomes.py +189 -0
- alma/workflow/reducers.py +393 -0
- {alma_memory-0.5.1.dist-info → alma_memory-0.7.0.dist-info}/METADATA +210 -72
- alma_memory-0.7.0.dist-info/RECORD +112 -0
- alma_memory-0.5.1.dist-info/RECORD +0 -93
- {alma_memory-0.5.1.dist-info → alma_memory-0.7.0.dist-info}/WHEEL +0 -0
- {alma_memory-0.5.1.dist-info → alma_memory-0.7.0.dist-info}/top_level.txt +0 -0
alma/storage/__init__.py
CHANGED
|
@@ -1,90 +1,101 @@
|
|
|
1
|
-
"""ALMA Storage Backends."""
|
|
2
|
-
|
|
3
|
-
from alma.storage.
|
|
4
|
-
|
|
5
|
-
|
|
6
|
-
|
|
7
|
-
|
|
8
|
-
|
|
9
|
-
|
|
10
|
-
|
|
11
|
-
|
|
12
|
-
|
|
13
|
-
|
|
14
|
-
|
|
15
|
-
|
|
16
|
-
|
|
17
|
-
|
|
18
|
-
|
|
19
|
-
|
|
20
|
-
|
|
21
|
-
|
|
22
|
-
|
|
23
|
-
|
|
24
|
-
|
|
25
|
-
|
|
26
|
-
|
|
27
|
-
|
|
28
|
-
|
|
29
|
-
|
|
30
|
-
|
|
31
|
-
|
|
32
|
-
|
|
33
|
-
|
|
34
|
-
|
|
35
|
-
|
|
36
|
-
|
|
37
|
-
|
|
38
|
-
|
|
39
|
-
|
|
40
|
-
|
|
41
|
-
|
|
42
|
-
|
|
43
|
-
|
|
44
|
-
|
|
45
|
-
|
|
46
|
-
|
|
47
|
-
|
|
48
|
-
|
|
49
|
-
|
|
50
|
-
|
|
51
|
-
|
|
52
|
-
|
|
53
|
-
|
|
54
|
-
|
|
55
|
-
|
|
56
|
-
|
|
57
|
-
|
|
58
|
-
|
|
59
|
-
|
|
60
|
-
|
|
61
|
-
|
|
62
|
-
|
|
63
|
-
|
|
64
|
-
|
|
65
|
-
|
|
66
|
-
|
|
67
|
-
|
|
68
|
-
|
|
69
|
-
|
|
70
|
-
|
|
71
|
-
|
|
72
|
-
|
|
73
|
-
|
|
74
|
-
|
|
75
|
-
"
|
|
76
|
-
"
|
|
77
|
-
|
|
78
|
-
"
|
|
79
|
-
"
|
|
80
|
-
"
|
|
81
|
-
"
|
|
82
|
-
"
|
|
83
|
-
#
|
|
84
|
-
"
|
|
85
|
-
"
|
|
86
|
-
"
|
|
87
|
-
"
|
|
88
|
-
"
|
|
89
|
-
|
|
90
|
-
|
|
1
|
+
"""ALMA Storage Backends."""
|
|
2
|
+
|
|
3
|
+
from alma.storage.archive import (
|
|
4
|
+
ArchiveConfig,
|
|
5
|
+
ArchivedMemory,
|
|
6
|
+
ArchiveReason,
|
|
7
|
+
ArchiveStats,
|
|
8
|
+
)
|
|
9
|
+
from alma.storage.base import StorageBackend
|
|
10
|
+
from alma.storage.constants import (
|
|
11
|
+
AZURE_COSMOS_CONTAINER_NAMES,
|
|
12
|
+
POSTGRESQL_TABLE_NAMES,
|
|
13
|
+
SQLITE_TABLE_NAMES,
|
|
14
|
+
MemoryType,
|
|
15
|
+
get_table_name,
|
|
16
|
+
get_table_names,
|
|
17
|
+
)
|
|
18
|
+
from alma.storage.file_based import FileBasedStorage
|
|
19
|
+
from alma.storage.migrations import (
|
|
20
|
+
Migration,
|
|
21
|
+
MigrationError,
|
|
22
|
+
MigrationRegistry,
|
|
23
|
+
MigrationRunner,
|
|
24
|
+
SchemaVersion,
|
|
25
|
+
)
|
|
26
|
+
from alma.storage.sqlite_local import SQLiteStorage
|
|
27
|
+
|
|
28
|
+
# Azure Cosmos DB is optional - requires azure-cosmos package
|
|
29
|
+
try:
|
|
30
|
+
from alma.storage.azure_cosmos import AzureCosmosStorage
|
|
31
|
+
|
|
32
|
+
_HAS_AZURE = True
|
|
33
|
+
except ImportError:
|
|
34
|
+
AzureCosmosStorage = None # type: ignore
|
|
35
|
+
_HAS_AZURE = False
|
|
36
|
+
|
|
37
|
+
# PostgreSQL is optional - requires psycopg package
|
|
38
|
+
try:
|
|
39
|
+
from alma.storage.postgresql import PostgreSQLStorage
|
|
40
|
+
|
|
41
|
+
_HAS_POSTGRES = True
|
|
42
|
+
except ImportError:
|
|
43
|
+
PostgreSQLStorage = None # type: ignore
|
|
44
|
+
_HAS_POSTGRES = False
|
|
45
|
+
|
|
46
|
+
# Qdrant is optional - requires qdrant-client package
|
|
47
|
+
try:
|
|
48
|
+
from alma.storage.qdrant import QdrantStorage
|
|
49
|
+
|
|
50
|
+
_HAS_QDRANT = True
|
|
51
|
+
except ImportError:
|
|
52
|
+
QdrantStorage = None # type: ignore
|
|
53
|
+
_HAS_QDRANT = False
|
|
54
|
+
|
|
55
|
+
# ChromaDB is optional - requires chromadb package
|
|
56
|
+
try:
|
|
57
|
+
from alma.storage.chroma import ChromaStorage
|
|
58
|
+
|
|
59
|
+
_HAS_CHROMA = True
|
|
60
|
+
except ImportError:
|
|
61
|
+
ChromaStorage = None # type: ignore
|
|
62
|
+
_HAS_CHROMA = False
|
|
63
|
+
|
|
64
|
+
# Pinecone is optional - requires pinecone-client package
|
|
65
|
+
try:
|
|
66
|
+
from alma.storage.pinecone import PineconeStorage
|
|
67
|
+
|
|
68
|
+
_HAS_PINECONE = True
|
|
69
|
+
except ImportError:
|
|
70
|
+
PineconeStorage = None # type: ignore
|
|
71
|
+
_HAS_PINECONE = False
|
|
72
|
+
|
|
73
|
+
__all__ = [
|
|
74
|
+
# Storage backends
|
|
75
|
+
"StorageBackend",
|
|
76
|
+
"FileBasedStorage",
|
|
77
|
+
"SQLiteStorage",
|
|
78
|
+
"AzureCosmosStorage",
|
|
79
|
+
"PostgreSQLStorage",
|
|
80
|
+
"QdrantStorage",
|
|
81
|
+
"ChromaStorage",
|
|
82
|
+
"PineconeStorage",
|
|
83
|
+
# Migration framework
|
|
84
|
+
"Migration",
|
|
85
|
+
"MigrationError",
|
|
86
|
+
"MigrationRegistry",
|
|
87
|
+
"MigrationRunner",
|
|
88
|
+
"SchemaVersion",
|
|
89
|
+
# Archive system (v0.7.0+)
|
|
90
|
+
"ArchivedMemory",
|
|
91
|
+
"ArchiveConfig",
|
|
92
|
+
"ArchiveReason",
|
|
93
|
+
"ArchiveStats",
|
|
94
|
+
# Constants for consistent naming
|
|
95
|
+
"MemoryType",
|
|
96
|
+
"get_table_name",
|
|
97
|
+
"get_table_names",
|
|
98
|
+
"POSTGRESQL_TABLE_NAMES",
|
|
99
|
+
"SQLITE_TABLE_NAMES",
|
|
100
|
+
"AZURE_COSMOS_CONTAINER_NAMES",
|
|
101
|
+
]
|
alma/storage/archive.py
ADDED
|
@@ -0,0 +1,233 @@
|
|
|
1
|
+
"""
|
|
2
|
+
ALMA Memory Archive System.
|
|
3
|
+
|
|
4
|
+
Provides a safety net for memories before permanent deletion, supporting:
|
|
5
|
+
- Recovery of accidentally deleted memories
|
|
6
|
+
- Compliance and audit requirements
|
|
7
|
+
- Analysis of forgotten memories
|
|
8
|
+
|
|
9
|
+
Archives preserve full memory data with metadata about why/when archived.
|
|
10
|
+
"""
|
|
11
|
+
|
|
12
|
+
import uuid
|
|
13
|
+
from dataclasses import dataclass, field
|
|
14
|
+
from datetime import datetime, timezone
|
|
15
|
+
from enum import Enum
|
|
16
|
+
from typing import Any, Dict, List, Optional
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
class ArchiveReason(Enum):
    """Enumerates why a memory was moved into the archive.

    The string values are persisted inside archive records, so they must
    stay stable across releases.
    """

    # Natural decay below threshold
    DECAY = "decay"
    # User-initiated archival
    MANUAL = "manual"
    # Merged into another memory
    CONSOLIDATION = "consolidation"
    # Replaced by better memory
    SUPERSEDED = "superseded"
    # Agent quota exceeded
    QUOTA = "quota"
    # General cleanup operation
    CLEANUP = "cleanup"
|
28
|
+
|
|
29
|
+
|
|
30
|
+
@dataclass
class ArchivedMemory:
    """
    A memory that has been archived (soft-deleted).

    Archives preserve the full memory content along with metadata
    about when and why the memory was archived, enabling recovery
    and audit capabilities.

    Attributes:
        id: Unique archive identifier
        original_id: Original memory ID before archival
        memory_type: Type of memory (heuristic, outcome, knowledge, etc.)
        content: Serialized memory content (JSON)
        embedding: Original embedding vector (if available)
        metadata: Additional memory metadata
        original_created_at: When the memory was originally created
        archived_at: When the memory was archived
        archive_reason: Why the memory was archived
        final_strength: Memory strength at time of archival
        project_id: Project the memory belonged to
        agent: Agent the memory belonged to
        restored: Whether this archive has been restored
        restored_at: When the archive was restored (if applicable)
        restored_as: New memory ID after restoration (if applicable)
    """

    id: str
    original_id: str
    memory_type: str
    content: str  # JSON serialized memory content
    embedding: Optional[List[float]]
    metadata: Dict[str, Any]
    original_created_at: datetime
    archived_at: datetime
    archive_reason: str
    final_strength: float
    project_id: str
    agent: str
    restored: bool = False
    restored_at: Optional[datetime] = None
    restored_as: Optional[str] = None

    @classmethod
    def create(
        cls,
        original_id: str,
        memory_type: str,
        content: str,
        project_id: str,
        agent: str,
        archive_reason: str,
        final_strength: float,
        original_created_at: Optional[datetime] = None,
        embedding: Optional[List[float]] = None,
        metadata: Optional[Dict[str, Any]] = None,
    ) -> "ArchivedMemory":
        """
        Create a new archived memory.

        Args:
            original_id: Original memory ID
            memory_type: Type of memory
            content: Serialized memory content
            project_id: Project ID
            agent: Agent name
            archive_reason: Reason for archival
            final_strength: Strength at archival
            original_created_at: Original creation time (defaults to now)
            embedding: Optional embedding vector
            metadata: Optional additional metadata

        Returns:
            New ArchivedMemory instance
        """
        now = datetime.now(timezone.utc)
        return cls(
            # Short random suffix keeps IDs readable while remaining unique
            # enough for archive volumes.
            id=f"archive-{uuid.uuid4().hex[:12]}",
            original_id=original_id,
            memory_type=memory_type,
            content=content,
            embedding=embedding,
            metadata=metadata or {},
            original_created_at=original_created_at or now,
            archived_at=now,
            archive_reason=archive_reason,
            final_strength=final_strength,
            project_id=project_id,
            agent=agent,
        )

    def to_dict(self) -> Dict[str, Any]:
        """Convert to dictionary for serialization (datetimes as ISO-8601)."""
        return {
            "id": self.id,
            "original_id": self.original_id,
            "memory_type": self.memory_type,
            "content": self.content,
            "embedding": self.embedding,
            "metadata": self.metadata,
            "original_created_at": self.original_created_at.isoformat(),
            "archived_at": self.archived_at.isoformat(),
            "archive_reason": self.archive_reason,
            "final_strength": self.final_strength,
            "project_id": self.project_id,
            "agent": self.agent,
            "restored": self.restored,
            "restored_at": self.restored_at.isoformat() if self.restored_at else None,
            "restored_as": self.restored_as,
        }

    @classmethod
    def from_dict(cls, data: Dict[str, Any]) -> "ArchivedMemory":
        """Create from dictionary.

        Timestamp fields accept ISO-8601 strings (a trailing "Z" is
        normalized to "+00:00") or datetime objects.
        """
        archived_at = data.get("archived_at")
        if isinstance(archived_at, str):
            archived_at = datetime.fromisoformat(archived_at.replace("Z", "+00:00"))
        elif archived_at is None:
            archived_at = datetime.now(timezone.utc)

        original_created_at = data.get("original_created_at")
        if isinstance(original_created_at, str):
            original_created_at = datetime.fromisoformat(
                original_created_at.replace("Z", "+00:00")
            )
        elif original_created_at is None:
            # Bug fix: previously this was left as None when the key was
            # missing, violating the datetime-typed field and crashing
            # to_dict() on .isoformat(). Fall back to the archive timestamp
            # as the best available approximation.
            original_created_at = archived_at

        restored_at = data.get("restored_at")
        if isinstance(restored_at, str):
            restored_at = datetime.fromisoformat(restored_at.replace("Z", "+00:00"))

        return cls(
            id=data["id"],
            original_id=data["original_id"],
            memory_type=data["memory_type"],
            content=data["content"],
            embedding=data.get("embedding"),
            metadata=data.get("metadata", {}),
            original_created_at=original_created_at,
            archived_at=archived_at,
            archive_reason=data["archive_reason"],
            final_strength=data.get("final_strength", 0.0),
            project_id=data["project_id"],
            agent=data["agent"],
            restored=data.get("restored", False),
            restored_at=restored_at,
            restored_as=data.get("restored_as"),
        )

    def mark_restored(self, new_memory_id: str) -> None:
        """Mark this archive as restored into a new memory."""
        self.restored = True
        self.restored_at = datetime.now(timezone.utc)
        self.restored_as = new_memory_id
|
|
183
|
+
|
|
184
|
+
|
|
185
|
+
@dataclass
class ArchiveConfig:
    """Configuration for memory archiving.

    Defaults favor safety: archiving enabled, archives kept for one year,
    and purging left as a manual operation.
    """

    enabled: bool = True
    retention_days: int = 365  # Keep archives for 1 year by default
    auto_purge: bool = False  # Manual purge only by default
    archive_on_decay: bool = True  # Archive when decaying memories
    archive_on_consolidation: bool = True  # Archive when consolidating

    @classmethod
    def from_dict(cls, data: Dict[str, Any]) -> "ArchiveConfig":
        """Build a config from a plain dict, falling back to field defaults."""
        base = cls()
        overrides = {
            name: data.get(name, getattr(base, name))
            for name in (
                "enabled",
                "retention_days",
                "auto_purge",
                "archive_on_decay",
                "archive_on_consolidation",
            )
        }
        return cls(**overrides)
|
|
205
|
+
|
|
206
|
+
|
|
207
|
+
@dataclass
class ArchiveStats:
    """Aggregate statistics about archived memories."""

    total_count: int = 0
    by_reason: Dict[str, int] = field(default_factory=dict)
    by_type: Dict[str, int] = field(default_factory=dict)
    by_agent: Dict[str, int] = field(default_factory=dict)
    restored_count: int = 0
    oldest_archive: Optional[datetime] = None
    newest_archive: Optional[datetime] = None

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a plain dictionary (datetimes as ISO-8601 or None)."""

        def _iso(ts: Optional[datetime]) -> Optional[str]:
            # Shared formatting for the two optional timestamp bounds.
            return ts.isoformat() if ts else None

        return {
            "total_count": self.total_count,
            "by_reason": self.by_reason,
            "by_type": self.by_type,
            "by_agent": self.by_agent,
            "restored_count": self.restored_count,
            "oldest_archive": _iso(self.oldest_archive),
            "newest_archive": _iso(self.newest_archive),
        }
|