spatial-memory-mcp 1.9.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- spatial_memory/__init__.py +97 -0
- spatial_memory/__main__.py +271 -0
- spatial_memory/adapters/__init__.py +7 -0
- spatial_memory/adapters/lancedb_repository.py +880 -0
- spatial_memory/config.py +769 -0
- spatial_memory/core/__init__.py +118 -0
- spatial_memory/core/cache.py +317 -0
- spatial_memory/core/circuit_breaker.py +297 -0
- spatial_memory/core/connection_pool.py +220 -0
- spatial_memory/core/consolidation_strategies.py +401 -0
- spatial_memory/core/database.py +3072 -0
- spatial_memory/core/db_idempotency.py +242 -0
- spatial_memory/core/db_indexes.py +576 -0
- spatial_memory/core/db_migrations.py +588 -0
- spatial_memory/core/db_search.py +512 -0
- spatial_memory/core/db_versioning.py +178 -0
- spatial_memory/core/embeddings.py +558 -0
- spatial_memory/core/errors.py +317 -0
- spatial_memory/core/file_security.py +701 -0
- spatial_memory/core/filesystem.py +178 -0
- spatial_memory/core/health.py +289 -0
- spatial_memory/core/helpers.py +79 -0
- spatial_memory/core/import_security.py +433 -0
- spatial_memory/core/lifecycle_ops.py +1067 -0
- spatial_memory/core/logging.py +194 -0
- spatial_memory/core/metrics.py +192 -0
- spatial_memory/core/models.py +660 -0
- spatial_memory/core/rate_limiter.py +326 -0
- spatial_memory/core/response_types.py +500 -0
- spatial_memory/core/security.py +588 -0
- spatial_memory/core/spatial_ops.py +430 -0
- spatial_memory/core/tracing.py +300 -0
- spatial_memory/core/utils.py +110 -0
- spatial_memory/core/validation.py +406 -0
- spatial_memory/factory.py +444 -0
- spatial_memory/migrations/__init__.py +40 -0
- spatial_memory/ports/__init__.py +11 -0
- spatial_memory/ports/repositories.py +630 -0
- spatial_memory/py.typed +0 -0
- spatial_memory/server.py +1214 -0
- spatial_memory/services/__init__.py +70 -0
- spatial_memory/services/decay_manager.py +411 -0
- spatial_memory/services/export_import.py +1031 -0
- spatial_memory/services/lifecycle.py +1139 -0
- spatial_memory/services/memory.py +412 -0
- spatial_memory/services/spatial.py +1152 -0
- spatial_memory/services/utility.py +429 -0
- spatial_memory/tools/__init__.py +5 -0
- spatial_memory/tools/definitions.py +695 -0
- spatial_memory/verify.py +140 -0
- spatial_memory_mcp-1.9.1.dist-info/METADATA +509 -0
- spatial_memory_mcp-1.9.1.dist-info/RECORD +55 -0
- spatial_memory_mcp-1.9.1.dist-info/WHEEL +4 -0
- spatial_memory_mcp-1.9.1.dist-info/entry_points.txt +2 -0
- spatial_memory_mcp-1.9.1.dist-info/licenses/LICENSE +21 -0
|
@@ -0,0 +1,660 @@
|
|
|
1
|
+
"""Data models for Spatial Memory MCP Server."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from dataclasses import dataclass, field
|
|
6
|
+
from datetime import datetime
|
|
7
|
+
from enum import Enum
|
|
8
|
+
from typing import Any, Literal
|
|
9
|
+
|
|
10
|
+
from pydantic import BaseModel, Field, model_validator
|
|
11
|
+
|
|
12
|
+
from spatial_memory.core.utils import utc_now
|
|
13
|
+
|
|
14
|
+
# Type alias for metadata filter values. Scalars support comparison
# operators (eq/ne/gt/...); list forms support membership operators
# (in/nin). See FilterOperator below.
FilterValue = (
    str | int | float | bool | datetime |
    list[str] | list[int] | list[float] | list[bool] | list[datetime]
)
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
class MemorySource(str, Enum):
    """Source of a memory.

    str-mixin enum, so members serialize as plain strings.
    """

    MANUAL = "manual"  # Explicitly stored via remember()
    EXTRACTED = "extracted"  # Auto-extracted from conversation
    CONSOLIDATED = "consolidated"  # Result of consolidation
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
class Memory(BaseModel):
    """A single memory in the spatial memory system.

    Canonical stored record: identity, content, timestamps, usage
    counters, and organizational metadata (namespace/tags/source).
    """

    id: str = Field(..., description="Unique identifier (UUID)")
    content: str = Field(..., description="Text content of the memory", max_length=100000)
    # All three timestamps default to "now" at creation time (utc_now).
    created_at: datetime = Field(default_factory=utc_now)
    updated_at: datetime = Field(default_factory=utc_now)
    last_accessed: datetime = Field(default_factory=utc_now)
    access_count: int = Field(default=0, ge=0)  # Times this memory was read
    importance: float = Field(default=0.5, ge=0.0, le=1.0)  # Neutral default
    namespace: str = Field(default="default")  # Logical partition key
    tags: list[str] = Field(default_factory=list)
    source: MemorySource = Field(default=MemorySource.MANUAL)
    metadata: dict[str, Any] = Field(default_factory=dict)  # Free-form extras
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
class MemoryResult(BaseModel):
    """A memory with similarity score from search.

    Read-side projection of Memory: adds `similarity` and an optional
    raw `vector`, and relaxes `last_accessed` to optional.
    """

    id: str
    content: str
    similarity: float = Field(..., ge=0.0, le=1.0)  # Normalized match score
    namespace: str
    tags: list[str] = Field(default_factory=list)
    importance: float
    created_at: datetime
    last_accessed: datetime | None = Field(
        default=None,
        description="When the memory was last accessed (for auto-decay)",
    )
    access_count: int = Field(
        default=0,
        ge=0,
        description="Number of times the memory has been accessed (for auto-decay)",
    )
    metadata: dict[str, Any] = Field(default_factory=dict)
    vector: list[float] | None = Field(
        default=None,
        description="Embedding vector (only included when include_vector=True in search)",
    )
|
|
69
|
+
|
|
70
|
+
|
|
71
|
+
class ClusterInfo(BaseModel):
    """Information about a discovered cluster/region."""

    cluster_id: int
    label: str  # Auto-generated or centroid-based
    size: int  # Number of member memories
    centroid_memory_id: str  # Memory closest to centroid
    sample_memories: list[str]  # Sample content from cluster
    coherence: float = Field(ge=0.0, le=1.0)  # How tight the cluster is
|
|
80
|
+
|
|
81
|
+
|
|
82
|
+
class JourneyStep(BaseModel):
    """A step in a journey between two memories.

    Represents a point along the interpolated path between two memories,
    with nearby memories discovered at that position.
    """

    step: int  # Index of this step along the path
    t: float = Field(..., ge=0.0, le=1.0, description="Interpolation parameter [0, 1]")
    position: list[float] = Field(..., description="Interpolated vector position")
    nearby_memories: list[MemoryResult] = Field(
        default_factory=list, description="Memories near this path position"
    )
    distance_to_path: float = Field(
        default=0.0, ge=0.0, description="Distance from nearest memory to ideal path"
    )
|
|
98
|
+
|
|
99
|
+
|
|
100
|
+
class JourneyResult(BaseModel):
    """Result of a journey operation between two memories.

    Contains the full path with steps and discovered memories along the way.
    """

    start_id: str = Field(..., description="Starting memory ID")
    end_id: str = Field(..., description="Ending memory ID")
    steps: list[JourneyStep] = Field(default_factory=list, description="Journey steps")
    path_coverage: float = Field(
        default=0.0,
        ge=0.0,
        le=1.0,
        description="Fraction of path with nearby memories",
    )
|
|
115
|
+
|
|
116
|
+
|
|
117
|
+
class WanderStep(BaseModel):
    """A single step in a random walk through memory space.

    Represents transitioning from one memory to another based on
    similarity-weighted random selection.
    """

    step: int = Field(..., ge=0, description="Step number in the walk")
    memory: MemoryResult = Field(..., description="Memory at this step")
    similarity_to_previous: float = Field(
        default=0.0,
        ge=0.0,
        le=1.0,
        description="Similarity to the previous step's memory",
    )
    selection_probability: float = Field(
        default=0.0,
        ge=0.0,
        le=1.0,
        description="Probability this memory was selected",
    )
|
|
138
|
+
|
|
139
|
+
|
|
140
|
+
class WanderResult(BaseModel):
    """Result of a wander (random walk) operation.

    Contains the path taken during the random walk through memory space.
    """

    start_id: str = Field(..., description="Starting memory ID")
    steps: list[WanderStep] = Field(default_factory=list, description="Walk steps")
    total_distance: float = Field(
        default=0.0, ge=0.0, description="Total distance traveled in embedding space"
    )
|
|
151
|
+
|
|
152
|
+
|
|
153
|
+
class RegionCluster(BaseModel):
    """A cluster discovered during regions analysis.

    Represents a semantic region in memory space with coherent memories.
    Richer, MemoryResult-based counterpart of ClusterInfo.
    """

    cluster_id: int = Field(..., description="Cluster identifier (-1 for noise)")
    size: int = Field(..., ge=0, description="Number of memories in cluster")
    representative_memory: MemoryResult = Field(
        ..., description="Memory closest to cluster centroid"
    )
    sample_memories: list[MemoryResult] = Field(
        default_factory=list, description="Sample memories from the cluster"
    )
    coherence: float = Field(
        default=0.0,
        ge=0.0,
        le=1.0,
        description="Internal cluster coherence (tightness)",
    )
    keywords: list[str] = Field(
        default_factory=list, description="Keywords describing the cluster"
    )
|
|
176
|
+
|
|
177
|
+
|
|
178
|
+
class RegionsResult(BaseModel):
    """Result of a regions (clustering) operation.

    Contains discovered clusters and clustering quality metrics.
    """

    clusters: list[RegionCluster] = Field(
        default_factory=list, description="Discovered clusters"
    )
    noise_count: int = Field(
        default=0, ge=0, description="Number of memories not in any cluster"
    )
    total_memories: int = Field(
        default=0, ge=0, description="Total memories analyzed"
    )
    # Silhouette scores range [-1, 1], hence the wider bounds here.
    clustering_quality: float = Field(
        default=0.0,
        ge=-1.0,
        le=1.0,
        description="Overall clustering quality (silhouette score)",
    )
|
|
199
|
+
|
|
200
|
+
|
|
201
|
+
class VisualizationNode(BaseModel):
    """A node in the visualization (one memory projected to 2D)."""

    id: str
    x: float  # 2D projected coordinate
    y: float
    label: str
    cluster: int = -1  # -1 for noise/unclustered
    importance: float = 0.5
    highlighted: bool = False  # Marked for emphasis in rendering
|
|
211
|
+
|
|
212
|
+
|
|
213
|
+
class VisualizationEdge(BaseModel):
    """An edge connecting two nodes in visualization."""

    from_id: str
    to_id: str
    weight: float = Field(ge=0.0, le=1.0)  # Edge strength, normalized
|
|
219
|
+
|
|
220
|
+
|
|
221
|
+
class VisualizationCluster(BaseModel):
    """Cluster metadata for visualization."""

    id: int
    label: str
    color: str  # Rendering color (format decided by the producer)
    center_x: float  # Cluster center in the 2D projection
    center_y: float
|
|
229
|
+
|
|
230
|
+
|
|
231
|
+
class VisualizationData(BaseModel):
    """Data for visualizing the memory space.

    Raw geometry payload; VisualizationResult wraps similar data plus a
    formatted output string.
    """

    nodes: list[VisualizationNode]
    edges: list[VisualizationEdge] = Field(default_factory=list)
    clusters: list[VisualizationCluster] = Field(default_factory=list)
    # Default bounds span the unit square [-1, 1] x [-1, 1].
    bounds: dict[str, float] = Field(
        default_factory=lambda: {"x_min": -1.0, "x_max": 1.0, "y_min": -1.0, "y_max": 1.0}
    )
|
|
240
|
+
|
|
241
|
+
|
|
242
|
+
class VisualizationResult(BaseModel):
    """Result of a visualization operation.

    Contains the complete visualization output including nodes, edges,
    and the formatted output string.
    """

    nodes: list[VisualizationNode] = Field(
        default_factory=list, description="Visualization nodes"
    )
    edges: list[VisualizationEdge] = Field(
        default_factory=list, description="Connections between nodes"
    )
    # Same [-1, 1] unit-square default as VisualizationData.bounds.
    bounds: dict[str, float] = Field(
        default_factory=lambda: {
            "x_min": -1.0,
            "x_max": 1.0,
            "y_min": -1.0,
            "y_max": 1.0,
        },
        description="Coordinate bounds of the visualization",
    )
    format: str = Field(
        default="json",
        description="Output format (json, mermaid, svg)",
    )
    output: str = Field(
        default="", description="Formatted output string in the specified format"
    )
|
|
271
|
+
|
|
272
|
+
|
|
273
|
+
class FilterOperator(str, Enum):
    """Filter operators for querying memories.

    Values are the wire-format strings used in Filter.operator.
    """

    EQ = "eq"  # Equal
    NE = "ne"  # Not equal
    GT = "gt"  # Greater than
    GTE = "gte"  # Greater than or equal
    LT = "lt"  # Less than
    LTE = "lte"  # Less than or equal
    IN = "in"  # In list
    NIN = "nin"  # Not in list
    CONTAINS = "contains"  # String/list contains
|
|
285
|
+
|
|
286
|
+
|
|
287
|
+
class Filter(BaseModel):
    """A single filter condition (field OP value)."""

    field: str  # Name of the memory field/metadata key to test
    operator: FilterOperator
    value: FilterValue  # Scalar or list; list forms pair with IN/NIN
|
|
293
|
+
|
|
294
|
+
|
|
295
|
+
class FilterGroup(BaseModel):
    """A group of filters with logical operator.

    Groups may nest (filters can contain FilterGroup), forming a
    boolean expression tree over Filter leaves.
    """

    operator: Literal["and", "or"] = "and"
    # Self-reference resolved by FilterGroup.model_rebuild() below.
    filters: list[Filter | FilterGroup] = Field(..., min_length=1)

    @model_validator(mode="after")
    def validate_filters_not_empty(self) -> FilterGroup:
        """Validate that filters list is not empty.

        Belt-and-braces: min_length=1 above normally rejects this
        before the model validator runs.
        """
        if not self.filters:
            raise ValueError("FilterGroup must contain at least one filter")
        return self
|
|
307
|
+
|
|
308
|
+
|
|
309
|
+
# Update forward references: resolve the self-referential "FilterGroup"
# annotation in FilterGroup.filters now that the class is fully defined.
FilterGroup.model_rebuild()
|
|
311
|
+
|
|
312
|
+
|
|
313
|
+
# =============================================================================
|
|
314
|
+
# Lifecycle Enums
|
|
315
|
+
# =============================================================================
|
|
316
|
+
|
|
317
|
+
|
|
318
|
+
class DecayFunction(str, Enum):
    """Decay function types for importance decay."""

    EXPONENTIAL = "exponential"
    LINEAR = "linear"
    STEP = "step"
|
|
324
|
+
|
|
325
|
+
|
|
326
|
+
class BoostType(str, Enum):
    """Reinforcement boost types (how a boost changes importance)."""

    ADDITIVE = "additive"
    MULTIPLICATIVE = "multiplicative"
    SET_VALUE = "set_value"
|
|
332
|
+
|
|
333
|
+
|
|
334
|
+
class ConsolidationStrategy(str, Enum):
    """Consolidation strategies for merging similar memories."""

    KEEP_NEWEST = "keep_newest"
    KEEP_OLDEST = "keep_oldest"
    KEEP_HIGHEST_IMPORTANCE = "keep_highest_importance"
    MERGE_CONTENT = "merge_content"
|
|
341
|
+
|
|
342
|
+
|
|
343
|
+
class ExtractionPatternType(str, Enum):
    """Types of extracted content recognized by memory extraction."""

    DECISION = "decision"
    DEFINITION = "definition"
    SOLUTION = "solution"
    ERROR = "error"
    PATTERN = "pattern"
    EXPLICIT = "explicit"
    IMPORTANT = "important"
|
|
353
|
+
|
|
354
|
+
|
|
355
|
+
# =============================================================================
|
|
356
|
+
# Lifecycle Result Dataclasses
|
|
357
|
+
# =============================================================================
|
|
358
|
+
|
|
359
|
+
|
|
360
|
+
@dataclass
class DecayedMemory:
    """A memory with calculated decay (before/after importance)."""

    id: str
    content_preview: str  # Truncated content for display
    old_importance: float
    new_importance: float
    decay_factor: float  # Multiplier applied to old_importance
    days_since_access: int
    access_count: int
|
|
371
|
+
|
|
372
|
+
|
|
373
|
+
@dataclass
class DecayResult:
    """Result of decay operation."""

    memories_analyzed: int
    memories_decayed: int
    avg_decay_factor: float
    decayed_memories: list[DecayedMemory] = field(default_factory=list)
    dry_run: bool = True  # Defaults to preview mode; no writes applied
    failed_updates: list[str] = field(default_factory=list)  # IDs that failed to update
|
|
383
|
+
|
|
384
|
+
|
|
385
|
+
@dataclass
class ReinforcedMemory:
    """A memory that was reinforced (importance boosted)."""

    id: str
    content_preview: str  # Truncated content for display
    old_importance: float
    new_importance: float
    boost_applied: float  # Magnitude of the boost (see BoostType)
|
|
394
|
+
|
|
395
|
+
|
|
396
|
+
@dataclass
class ReinforceResult:
    """Result of reinforcement operation."""

    memories_reinforced: int
    avg_boost: float
    reinforced: list[ReinforcedMemory] = field(default_factory=list)
    not_found: list[str] = field(default_factory=list)  # Requested IDs that don't exist
    failed_updates: list[str] = field(default_factory=list)  # IDs that failed to update
|
|
405
|
+
|
|
406
|
+
|
|
407
|
+
@dataclass
class ExtractedMemory:
    """A memory candidate extracted from text."""

    content: str
    confidence: float  # Extraction confidence for this candidate
    pattern_matched: str  # Which extraction pattern fired
    start_pos: int  # Character span of the match in the source text
    end_pos: int
    stored: bool  # False if deduplicated
    memory_id: str | None = None  # Set if stored
|
|
418
|
+
|
|
419
|
+
|
|
420
|
+
@dataclass
class ExtractResult:
    """Result of memory extraction."""

    candidates_found: int
    memories_created: int
    deduplicated_count: int  # Candidates skipped as duplicates
    extractions: list[ExtractedMemory] = field(default_factory=list)
|
|
428
|
+
|
|
429
|
+
|
|
430
|
+
@dataclass
class ConsolidationGroup:
    """A group of similar memories considered for consolidation."""

    representative_id: str  # Memory kept to represent the group
    member_ids: list[str]
    avg_similarity: float  # Mean pairwise similarity within the group
    action_taken: str  # "merged", "deleted", "preview"
|
|
438
|
+
|
|
439
|
+
|
|
440
|
+
@dataclass
class ConsolidateResult:
    """Result of consolidation."""

    groups_found: int
    memories_merged: int
    memories_deleted: int
    groups: list[ConsolidationGroup] = field(default_factory=list)
    dry_run: bool = True  # Defaults to preview mode; no writes applied
|
|
449
|
+
|
|
450
|
+
|
|
451
|
+
# =============================================================================
|
|
452
|
+
# Phase 5 Utility Result Dataclasses
|
|
453
|
+
# =============================================================================
|
|
454
|
+
|
|
455
|
+
|
|
456
|
+
@dataclass
class IndexInfo:
    """Information about a single database index."""

    name: str
    index_type: str  # Backend-specific index kind
    column: str  # Column the index covers
    num_indexed_rows: int
    status: str  # "ready", "building", "needs_update"
|
|
465
|
+
|
|
466
|
+
|
|
467
|
+
@dataclass
class StatsResult:
    """Result of database statistics query."""

    total_memories: int
    memories_by_namespace: dict[str, int]  # Per-namespace counts
    storage_bytes: int
    storage_mb: float  # Same quantity as storage_bytes, in MB
    estimated_vector_bytes: int  # Approximate embedding storage share
    has_vector_index: bool
    has_fts_index: bool  # Full-text search index present
    indices: list[IndexInfo]
    num_fragments: int  # Table fragment count
    needs_compaction: bool
    table_version: int
    oldest_memory_date: datetime | None = None
    newest_memory_date: datetime | None = None
    avg_content_length: float | None = None
|
|
485
|
+
|
|
486
|
+
|
|
487
|
+
@dataclass
class NamespaceInfo:
    """Information about a single namespace."""

    name: str
    memory_count: int
    oldest_memory: datetime | None = None
    newest_memory: datetime | None = None
|
|
495
|
+
|
|
496
|
+
|
|
497
|
+
@dataclass
class NamespacesResult:
    """Result of namespace listing."""

    namespaces: list[NamespaceInfo]
    total_namespaces: int
    total_memories: int  # Sum across all namespaces
|
|
504
|
+
|
|
505
|
+
|
|
506
|
+
@dataclass
class DeleteNamespaceResult:
    """Result of namespace deletion."""

    namespace: str
    memories_deleted: int
    success: bool
    message: str  # Human-readable outcome description
    dry_run: bool = False  # Unlike decay/consolidate, deletion applies by default
|
|
515
|
+
|
|
516
|
+
|
|
517
|
+
@dataclass
class RenameNamespaceResult:
    """Result of namespace rename."""

    old_namespace: str
    new_namespace: str
    memories_renamed: int
    success: bool
    message: str  # Human-readable outcome description
|
|
526
|
+
|
|
527
|
+
|
|
528
|
+
@dataclass
class ExportResult:
    """Result of memory export."""

    format: str  # parquet, json, csv
    output_path: str
    memories_exported: int
    file_size_bytes: int
    file_size_mb: float  # Same quantity as file_size_bytes, in MB
    namespaces_included: list[str]
    duration_seconds: float
    compression: str | None = None  # e.g. parquet compression codec
|
|
540
|
+
|
|
541
|
+
|
|
542
|
+
@dataclass
class ImportedMemory:
    """Information about a single imported memory."""

    id: str  # ID assigned in this database
    content_preview: str  # Truncated content for display
    namespace: str
    was_deduplicated: bool = False
    original_id: str | None = None  # ID from the source file, if different
|
|
551
|
+
|
|
552
|
+
|
|
553
|
+
@dataclass
class ImportValidationError:
    """A validation error during import."""

    row_number: int  # Row in the source file that failed
    field: str  # Field that failed validation
    error: str  # Description of the failure
    value: str | None = None  # Offending value, when representable
|
|
561
|
+
|
|
562
|
+
|
|
563
|
+
@dataclass
class ImportResult:
    """Result of memory import."""

    source_path: str
    format: str  # Format of the imported file
    total_records_in_file: int
    memories_imported: int
    memories_skipped: int  # e.g. deduplicated
    memories_failed: int  # Failed validation or write
    validation_errors: list[ImportValidationError]
    duration_seconds: float
    namespace_override: str | None = None  # Namespace forced on all records, if any
    imported_memories: list[ImportedMemory] | None = None  # Per-record details, when collected
|
|
577
|
+
|
|
578
|
+
|
|
579
|
+
@dataclass
class HybridMemoryMatch:
    """A memory matched by hybrid (vector + full-text) search."""

    id: str
    content: str
    similarity: float
    namespace: str
    tags: list[str]
    importance: float
    created_at: datetime
    metadata: dict[str, Any]
    vector_score: float | None = None  # Score from the vector leg, if it matched
    fts_score: float | None = None  # Score from the full-text leg, if it matched
    combined_score: float = 0.0  # Blended ranking score
    last_accessed: datetime | None = None  # For auto-decay
    access_count: int = 0  # For auto-decay
|
|
596
|
+
|
|
597
|
+
|
|
598
|
+
@dataclass
class HybridRecallResult:
    """Result of hybrid recall operation."""

    query: str
    alpha: float  # Vector/FTS blend weight used for this query
    memories: list[HybridMemoryMatch]
    total: int
    search_type: str = "hybrid"
|
|
607
|
+
|
|
608
|
+
|
|
609
|
+
# =============================================================================
|
|
610
|
+
# Phase 5 Service Configuration Dataclasses
|
|
611
|
+
# =============================================================================
|
|
612
|
+
|
|
613
|
+
|
|
614
|
+
@dataclass
class UtilityConfig:
    """Configuration for utility operations."""

    # Alpha blends the vector vs full-text legs of hybrid search.
    hybrid_default_alpha: float = 0.5
    hybrid_min_alpha: float = 0.0
    hybrid_max_alpha: float = 1.0
    stats_include_index_details: bool = True
    namespace_batch_size: int = 1000  # Rows per batch in namespace operations
    delete_namespace_require_confirmation: bool = True  # Safety gate for bulk delete
|
|
624
|
+
|
|
625
|
+
|
|
626
|
+
@dataclass
class ExportImportConfig:
    """Configuration for export/import operations."""

    default_export_format: str = "parquet"
    export_batch_size: int = 5000
    import_batch_size: int = 1000
    import_deduplicate: bool = False
    import_dedup_threshold: float = 0.95  # Similarity above which records dedupe
    validate_on_import: bool = True
    parquet_compression: str = "zstd"
    csv_include_vectors: bool = False  # Vectors bloat CSV; off by default
    max_export_records: int = 0  # 0 = unlimited
    max_import_records: int = 100_000  # Maximum records per import
|
|
641
|
+
|
|
642
|
+
@dataclass
class AutoDecayConfig:
    """Configuration for automatic decay during recall operations.

    Auto-decay calculates effective importance in real-time during searches,
    re-ranking results based on time-decayed importance. Updates can optionally
    be persisted to the database in the background.
    """

    enabled: bool = True
    # Background persistence of decayed importance values.
    persist_enabled: bool = True
    persist_batch_size: int = 100
    persist_flush_interval_seconds: float = 5.0
    min_change_threshold: float = 0.01  # Only persist changes > 1%
    max_queue_size: int = 10000  # Cap on pending persistence updates
    # Decay curve parameters.
    half_life_days: float = 30.0
    min_importance_floor: float = 0.1  # Importance never decays below this
    access_weight: float = 0.3  # Weight of access count in slowing decay
    decay_function: Literal["exponential", "linear", "step"] = "exponential"
|