spatial-memory-mcp 1.6.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of spatial-memory-mcp might be problematic. Click here for more details.

Files changed (54)
  1. spatial_memory/__init__.py +97 -0
  2. spatial_memory/__main__.py +270 -0
  3. spatial_memory/adapters/__init__.py +7 -0
  4. spatial_memory/adapters/lancedb_repository.py +878 -0
  5. spatial_memory/config.py +728 -0
  6. spatial_memory/core/__init__.py +118 -0
  7. spatial_memory/core/cache.py +317 -0
  8. spatial_memory/core/circuit_breaker.py +297 -0
  9. spatial_memory/core/connection_pool.py +220 -0
  10. spatial_memory/core/consolidation_strategies.py +402 -0
  11. spatial_memory/core/database.py +3069 -0
  12. spatial_memory/core/db_idempotency.py +242 -0
  13. spatial_memory/core/db_indexes.py +575 -0
  14. spatial_memory/core/db_migrations.py +584 -0
  15. spatial_memory/core/db_search.py +509 -0
  16. spatial_memory/core/db_versioning.py +177 -0
  17. spatial_memory/core/embeddings.py +557 -0
  18. spatial_memory/core/errors.py +317 -0
  19. spatial_memory/core/file_security.py +702 -0
  20. spatial_memory/core/filesystem.py +178 -0
  21. spatial_memory/core/health.py +289 -0
  22. spatial_memory/core/helpers.py +79 -0
  23. spatial_memory/core/import_security.py +432 -0
  24. spatial_memory/core/lifecycle_ops.py +1067 -0
  25. spatial_memory/core/logging.py +194 -0
  26. spatial_memory/core/metrics.py +192 -0
  27. spatial_memory/core/models.py +628 -0
  28. spatial_memory/core/rate_limiter.py +326 -0
  29. spatial_memory/core/response_types.py +497 -0
  30. spatial_memory/core/security.py +588 -0
  31. spatial_memory/core/spatial_ops.py +426 -0
  32. spatial_memory/core/tracing.py +300 -0
  33. spatial_memory/core/utils.py +110 -0
  34. spatial_memory/core/validation.py +403 -0
  35. spatial_memory/factory.py +407 -0
  36. spatial_memory/migrations/__init__.py +40 -0
  37. spatial_memory/ports/__init__.py +11 -0
  38. spatial_memory/ports/repositories.py +631 -0
  39. spatial_memory/py.typed +0 -0
  40. spatial_memory/server.py +1141 -0
  41. spatial_memory/services/__init__.py +70 -0
  42. spatial_memory/services/export_import.py +1023 -0
  43. spatial_memory/services/lifecycle.py +1120 -0
  44. spatial_memory/services/memory.py +412 -0
  45. spatial_memory/services/spatial.py +1147 -0
  46. spatial_memory/services/utility.py +409 -0
  47. spatial_memory/tools/__init__.py +5 -0
  48. spatial_memory/tools/definitions.py +695 -0
  49. spatial_memory/verify.py +140 -0
  50. spatial_memory_mcp-1.6.1.dist-info/METADATA +499 -0
  51. spatial_memory_mcp-1.6.1.dist-info/RECORD +54 -0
  52. spatial_memory_mcp-1.6.1.dist-info/WHEEL +4 -0
  53. spatial_memory_mcp-1.6.1.dist-info/entry_points.txt +2 -0
  54. spatial_memory_mcp-1.6.1.dist-info/licenses/LICENSE +21 -0
@@ -0,0 +1,628 @@
1
+ """Data models for Spatial Memory MCP Server."""
2
+
3
+ from __future__ import annotations
4
+
5
+ from dataclasses import dataclass, field
6
+ from datetime import datetime
7
+ from enum import Enum
8
+ from typing import Any, Literal
9
+
10
+ from pydantic import BaseModel, Field, model_validator
11
+
12
+ from spatial_memory.core.utils import utc_now
13
+
14
+ # Type alias for filter values - covers all expected filter value types
15
+ FilterValue = (
16
+ str | int | float | bool | datetime |
17
+ list[str] | list[int] | list[float] | list[bool] | list[datetime]
18
+ )
19
+
20
+
21
class MemorySource(str, Enum):
    """How a memory entered the system."""

    MANUAL = "manual"              # Explicitly stored via remember()
    EXTRACTED = "extracted"        # Auto-extracted from conversation
    CONSOLIDATED = "consolidated"  # Result of consolidation
27
+
28
+
29
class Memory(BaseModel):
    """A single memory in the spatial memory system.

    All timestamps default to the current UTC time; ``importance`` is
    constrained to the [0.0, 1.0] range by field validation.
    """

    # Identity and payload.
    id: str = Field(..., description="Unique identifier (UUID)")
    content: str = Field(..., description="Text content of the memory", max_length=100000)
    # Temporal bookkeeping.
    created_at: datetime = Field(default_factory=utc_now)
    updated_at: datetime = Field(default_factory=utc_now)
    last_accessed: datetime = Field(default_factory=utc_now)
    access_count: int = Field(default=0, ge=0)
    # Ranking and organization.
    importance: float = Field(default=0.5, ge=0.0, le=1.0)
    namespace: str = Field(default="default")
    tags: list[str] = Field(default_factory=list)
    source: MemorySource = Field(default=MemorySource.MANUAL)
    metadata: dict[str, Any] = Field(default_factory=dict)
43
+
44
+
45
class MemoryResult(BaseModel):
    """A memory together with its similarity score from a search."""

    id: str
    content: str
    similarity: float = Field(..., ge=0.0, le=1.0)
    namespace: str
    tags: list[str] = Field(default_factory=list)
    importance: float
    created_at: datetime
    metadata: dict[str, Any] = Field(default_factory=dict)
    # Omitted (None) unless the caller explicitly asked for vectors.
    vector: list[float] | None = Field(
        default=None,
        description="Embedding vector (only included when include_vector=True in search)",
    )
60
+
61
+
62
class ClusterInfo(BaseModel):
    """Summary of a discovered cluster/region of memories."""

    cluster_id: int
    label: str  # Auto-generated or centroid-based label.
    size: int
    centroid_memory_id: str  # ID of the memory nearest the centroid.
    sample_memories: list[str]  # Sample content drawn from the cluster.
    coherence: float = Field(ge=0.0, le=1.0)  # How tight the cluster is.
71
+
72
+
73
class JourneyStep(BaseModel):
    """One step along a journey between two memories.

    A point on the interpolated path between two memories, together with
    the memories discovered near that position.
    """

    step: int
    t: float = Field(..., ge=0.0, le=1.0, description="Interpolation parameter [0, 1]")
    position: list[float] = Field(..., description="Interpolated vector position")
    nearby_memories: list[MemoryResult] = Field(
        default_factory=list, description="Memories near this path position"
    )
    distance_to_path: float = Field(
        default=0.0, ge=0.0, description="Distance from nearest memory to ideal path"
    )


class JourneyResult(BaseModel):
    """Result of a journey operation between two memories.

    Holds the full interpolated path plus the memories found along it.
    """

    start_id: str = Field(..., description="Starting memory ID")
    end_id: str = Field(..., description="Ending memory ID")
    steps: list[JourneyStep] = Field(default_factory=list, description="Journey steps")
    path_coverage: float = Field(
        default=0.0, ge=0.0, le=1.0, description="Fraction of path with nearby memories"
    )
106
+
107
+
108
class WanderStep(BaseModel):
    """One step of a random walk through memory space.

    Records the transition to a memory chosen via similarity-weighted
    random selection.
    """

    step: int = Field(..., ge=0, description="Step number in the walk")
    memory: MemoryResult = Field(..., description="Memory at this step")
    similarity_to_previous: float = Field(
        default=0.0,
        ge=0.0,
        le=1.0,
        description="Similarity to the previous step's memory",
    )
    selection_probability: float = Field(
        default=0.0,
        ge=0.0,
        le=1.0,
        description="Probability this memory was selected",
    )


class WanderResult(BaseModel):
    """Result of a wander (random walk) operation.

    Captures the path taken while walking randomly through memory space.
    """

    start_id: str = Field(..., description="Starting memory ID")
    steps: list[WanderStep] = Field(default_factory=list, description="Walk steps")
    total_distance: float = Field(
        default=0.0, ge=0.0, description="Total distance traveled in embedding space"
    )
142
+
143
+
144
class RegionCluster(BaseModel):
    """A cluster discovered during regions analysis.

    A semantic region of memory space whose members are mutually
    coherent.
    """

    cluster_id: int = Field(..., description="Cluster identifier (-1 for noise)")
    size: int = Field(..., ge=0, description="Number of memories in cluster")
    representative_memory: MemoryResult = Field(
        ..., description="Memory closest to cluster centroid"
    )
    sample_memories: list[MemoryResult] = Field(
        default_factory=list, description="Sample memories from the cluster"
    )
    coherence: float = Field(
        default=0.0,
        ge=0.0,
        le=1.0,
        description="Internal cluster coherence (tightness)",
    )
    keywords: list[str] = Field(
        default_factory=list, description="Keywords describing the cluster"
    )


class RegionsResult(BaseModel):
    """Result of a regions (clustering) operation.

    Holds the discovered clusters along with clustering quality metrics.
    """

    clusters: list[RegionCluster] = Field(
        default_factory=list, description="Discovered clusters"
    )
    noise_count: int = Field(
        default=0, ge=0, description="Number of memories not in any cluster"
    )
    total_memories: int = Field(default=0, ge=0, description="Total memories analyzed")
    # Silhouette scores range over [-1, 1], hence the wider lower bound.
    clustering_quality: float = Field(
        default=0.0,
        ge=-1.0,
        le=1.0,
        description="Overall clustering quality (silhouette score)",
    )
190
+
191
+
192
class VisualizationNode(BaseModel):
    """A node in the visualization."""

    id: str
    x: float
    y: float
    label: str
    cluster: int = -1  # -1 marks noise / unclustered nodes.
    importance: float = 0.5
    highlighted: bool = False


class VisualizationEdge(BaseModel):
    """An edge connecting two nodes in visualization."""

    from_id: str
    to_id: str
    weight: float = Field(ge=0.0, le=1.0)


class VisualizationCluster(BaseModel):
    """Cluster metadata for visualization."""

    id: int
    label: str
    color: str
    center_x: float
    center_y: float


class VisualizationData(BaseModel):
    """Data for visualizing the memory space."""

    nodes: list[VisualizationNode]
    edges: list[VisualizationEdge] = Field(default_factory=list)
    clusters: list[VisualizationCluster] = Field(default_factory=list)
    # Default view spans the unit square centered on the origin.
    bounds: dict[str, float] = Field(
        default_factory=lambda: {"x_min": -1.0, "x_max": 1.0, "y_min": -1.0, "y_max": 1.0}
    )
231
+
232
+
233
class VisualizationResult(BaseModel):
    """Result of a visualization operation.

    Bundles the node/edge layout together with the rendered output
    string in the requested format.
    """

    nodes: list[VisualizationNode] = Field(
        default_factory=list, description="Visualization nodes"
    )
    edges: list[VisualizationEdge] = Field(
        default_factory=list, description="Connections between nodes"
    )
    # Same unit-square default view as VisualizationData.bounds.
    bounds: dict[str, float] = Field(
        default_factory=lambda: {"x_min": -1.0, "x_max": 1.0, "y_min": -1.0, "y_max": 1.0},
        description="Coordinate bounds of the visualization",
    )
    format: str = Field(default="json", description="Output format (json, mermaid, svg)")
    output: str = Field(
        default="", description="Formatted output string in the specified format"
    )
262
+
263
+
264
class FilterOperator(str, Enum):
    """Comparison operators available when filtering memories."""

    EQ = "eq"              # Equal
    NE = "ne"              # Not equal
    GT = "gt"              # Greater than
    GTE = "gte"            # Greater than or equal
    LT = "lt"              # Less than
    LTE = "lte"            # Less than or equal
    IN = "in"              # Value is in list
    NIN = "nin"            # Value is not in list
    CONTAINS = "contains"  # String/list contains
276
+
277
+
278
class Filter(BaseModel):
    """A single filter condition applied to one memory field."""

    field: str
    operator: FilterOperator
    value: FilterValue


class FilterGroup(BaseModel):
    """A set of filters combined with a logical operator.

    Groups may nest: each entry is either a leaf ``Filter`` or another
    ``FilterGroup``.
    """

    operator: Literal["and", "or"] = "and"
    filters: list[Filter | FilterGroup] = Field(..., min_length=1)

    @model_validator(mode="after")
    def validate_filters_not_empty(self) -> FilterGroup:
        """Reject an empty filters list (belt-and-braces with min_length)."""
        if not self.filters:
            raise ValueError("FilterGroup must contain at least one filter")
        return self


# Resolve the self-referential FilterGroup annotation.
FilterGroup.model_rebuild()
302
+
303
+
304
+ # =============================================================================
305
+ # Lifecycle Enums
306
+ # =============================================================================
307
+
308
+
309
class DecayFunction(str, Enum):
    """Supported decay curve shapes."""

    EXPONENTIAL = "exponential"
    LINEAR = "linear"
    STEP = "step"


class BoostType(str, Enum):
    """Ways a reinforcement boost may be applied to importance."""

    ADDITIVE = "additive"
    MULTIPLICATIVE = "multiplicative"
    SET_VALUE = "set_value"


class ConsolidationStrategy(str, Enum):
    """Strategies for resolving groups of near-duplicate memories."""

    KEEP_NEWEST = "keep_newest"
    KEEP_OLDEST = "keep_oldest"
    KEEP_HIGHEST_IMPORTANCE = "keep_highest_importance"
    MERGE_CONTENT = "merge_content"


class ExtractionPatternType(str, Enum):
    """Categories of content recognized by memory extraction."""

    DECISION = "decision"
    DEFINITION = "definition"
    SOLUTION = "solution"
    ERROR = "error"
    PATTERN = "pattern"
    EXPLICIT = "explicit"
    IMPORTANT = "important"
344
+
345
+
346
+ # =============================================================================
347
+ # Lifecycle Result Dataclasses
348
+ # =============================================================================
349
+
350
+
351
@dataclass
class DecayedMemory:
    """A memory together with its calculated decay."""

    id: str                 # Memory identifier.
    content_preview: str    # Short preview of the memory content.
    old_importance: float   # Importance before decay.
    new_importance: float   # Importance after decay.
    decay_factor: float     # Factor computed by the decay function.
    days_since_access: int  # Days since the memory was last accessed.
    access_count: int       # Recorded access count.


@dataclass
class DecayResult:
    """Aggregate outcome of a decay operation."""

    memories_analyzed: int
    memories_decayed: int
    avg_decay_factor: float
    # Per-memory details for everything that decayed.
    decayed_memories: list[DecayedMemory] = field(default_factory=list)
    # Preview mode flag; defaults to a dry run.
    dry_run: bool = True
    # IDs whose update failed to apply.
    failed_updates: list[str] = field(default_factory=list)
374
+
375
+
376
@dataclass
class ReinforcedMemory:
    """A memory whose importance was boosted."""

    id: str                # Memory identifier.
    content_preview: str   # Short preview of the memory content.
    old_importance: float  # Importance before the boost.
    new_importance: float  # Importance after the boost.
    boost_applied: float   # Magnitude of the applied boost.


@dataclass
class ReinforceResult:
    """Aggregate outcome of a reinforcement operation."""

    memories_reinforced: int
    avg_boost: float
    # Per-memory details for each successful reinforcement.
    reinforced: list[ReinforcedMemory] = field(default_factory=list)
    # Requested IDs that did not exist.
    not_found: list[str] = field(default_factory=list)
    # IDs whose update failed to apply.
    failed_updates: list[str] = field(default_factory=list)
396
+
397
+
398
+ @dataclass
399
+ class ExtractedMemory:
400
+ """A memory candidate extracted from text."""
401
+
402
+ content: str
403
+ confidence: float
404
+ pattern_matched: str
405
+ start_pos: int
406
+ end_pos: int
407
+ stored: bool # False if deduplicated
408
+ memory_id: str | None = None # Set if stored
409
+
410
+
411
+ @dataclass
412
+ class ExtractResult:
413
+ """Result of memory extraction."""
414
+
415
+ candidates_found: int
416
+ memories_created: int
417
+ deduplicated_count: int
418
+ extractions: list[ExtractedMemory] = field(default_factory=list)
419
+
420
+
421
@dataclass
class ConsolidationGroup:
    """A group of mutually similar memories."""

    representative_id: str   # ID of the group's representative memory.
    member_ids: list[str]    # IDs of all members in the group.
    avg_similarity: float    # Mean pairwise similarity of the group.
    action_taken: str        # "merged", "deleted", or "preview".


@dataclass
class ConsolidateResult:
    """Aggregate outcome of a consolidation operation."""

    groups_found: int
    memories_merged: int
    memories_deleted: int
    groups: list[ConsolidationGroup] = field(default_factory=list)
    # Preview mode flag; defaults to a dry run.
    dry_run: bool = True
440
+
441
+
442
+ # =============================================================================
443
+ # Phase 5 Utility Result Dataclasses
444
+ # =============================================================================
445
+
446
+
447
+ @dataclass
448
+ class IndexInfo:
449
+ """Information about a single database index."""
450
+
451
+ name: str
452
+ index_type: str
453
+ column: str
454
+ num_indexed_rows: int
455
+ status: str # "ready", "building", "needs_update"
456
+
457
+
458
+ @dataclass
459
+ class StatsResult:
460
+ """Result of database statistics query."""
461
+
462
+ total_memories: int
463
+ memories_by_namespace: dict[str, int]
464
+ storage_bytes: int
465
+ storage_mb: float
466
+ estimated_vector_bytes: int
467
+ has_vector_index: bool
468
+ has_fts_index: bool
469
+ indices: list[IndexInfo]
470
+ num_fragments: int
471
+ needs_compaction: bool
472
+ table_version: int
473
+ oldest_memory_date: datetime | None = None
474
+ newest_memory_date: datetime | None = None
475
+ avg_content_length: float | None = None
476
+
477
+
478
+ @dataclass
479
+ class NamespaceInfo:
480
+ """Information about a single namespace."""
481
+
482
+ name: str
483
+ memory_count: int
484
+ oldest_memory: datetime | None = None
485
+ newest_memory: datetime | None = None
486
+
487
+
488
+ @dataclass
489
+ class NamespacesResult:
490
+ """Result of namespace listing."""
491
+
492
+ namespaces: list[NamespaceInfo]
493
+ total_namespaces: int
494
+ total_memories: int
495
+
496
+
497
@dataclass
class DeleteNamespaceResult:
    """Outcome of deleting a namespace."""

    namespace: str
    memories_deleted: int
    success: bool
    message: str
    # Preview mode flag; deletion defaults to a real run.
    dry_run: bool = False


@dataclass
class RenameNamespaceResult:
    """Outcome of renaming a namespace."""

    old_namespace: str
    new_namespace: str
    memories_renamed: int
    success: bool
    message: str
517
+
518
+
519
+ @dataclass
520
+ class ExportResult:
521
+ """Result of memory export."""
522
+
523
+ format: str # parquet, json, csv
524
+ output_path: str
525
+ memories_exported: int
526
+ file_size_bytes: int
527
+ file_size_mb: float
528
+ namespaces_included: list[str]
529
+ duration_seconds: float
530
+ compression: str | None = None
531
+
532
+
533
+ @dataclass
534
+ class ImportedMemory:
535
+ """Information about a single imported memory."""
536
+
537
+ id: str
538
+ content_preview: str
539
+ namespace: str
540
+ was_deduplicated: bool = False
541
+ original_id: str | None = None
542
+
543
+
544
+ @dataclass
545
+ class ImportValidationError:
546
+ """A validation error during import."""
547
+
548
+ row_number: int
549
+ field: str
550
+ error: str
551
+ value: str | None = None
552
+
553
+
554
+ @dataclass
555
+ class ImportResult:
556
+ """Result of memory import."""
557
+
558
+ source_path: str
559
+ format: str
560
+ total_records_in_file: int
561
+ memories_imported: int
562
+ memories_skipped: int
563
+ memories_failed: int
564
+ validation_errors: list[ImportValidationError]
565
+ duration_seconds: float
566
+ namespace_override: str | None = None
567
+ imported_memories: list[ImportedMemory] | None = None
568
+
569
+
570
+ @dataclass
571
+ class HybridMemoryMatch:
572
+ """A memory matched by hybrid search."""
573
+
574
+ id: str
575
+ content: str
576
+ similarity: float
577
+ namespace: str
578
+ tags: list[str]
579
+ importance: float
580
+ created_at: datetime
581
+ metadata: dict[str, Any]
582
+ vector_score: float | None = None
583
+ fts_score: float | None = None
584
+ combined_score: float = 0.0
585
+
586
+
587
+ @dataclass
588
+ class HybridRecallResult:
589
+ """Result of hybrid recall operation."""
590
+
591
+ query: str
592
+ alpha: float
593
+ memories: list[HybridMemoryMatch]
594
+ total: int
595
+ search_type: str = "hybrid"
596
+
597
+
598
+ # =============================================================================
599
+ # Phase 5 Service Configuration Dataclasses
600
+ # =============================================================================
601
+
602
+
603
@dataclass
class UtilityConfig:
    """Tunable settings for utility operations."""

    hybrid_default_alpha: float = 0.5
    hybrid_min_alpha: float = 0.0
    hybrid_max_alpha: float = 1.0
    stats_include_index_details: bool = True
    namespace_batch_size: int = 1000
    delete_namespace_require_confirmation: bool = True


@dataclass
class ExportImportConfig:
    """Tunable settings for export/import operations."""

    default_export_format: str = "parquet"
    export_batch_size: int = 5000
    import_batch_size: int = 1000
    import_deduplicate: bool = False
    import_dedup_threshold: float = 0.95
    validate_on_import: bool = True
    parquet_compression: str = "zstd"
    csv_include_vectors: bool = False
    max_export_records: int = 0         # 0 means unlimited.
    max_import_records: int = 100_000   # Hard cap per import.