spatial-memory-mcp 1.6.1 (spatial_memory_mcp-1.6.1-py3-none-any.whl)
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of spatial-memory-mcp has been flagged for review.
- spatial_memory/__init__.py +97 -0
- spatial_memory/__main__.py +270 -0
- spatial_memory/adapters/__init__.py +7 -0
- spatial_memory/adapters/lancedb_repository.py +878 -0
- spatial_memory/config.py +728 -0
- spatial_memory/core/__init__.py +118 -0
- spatial_memory/core/cache.py +317 -0
- spatial_memory/core/circuit_breaker.py +297 -0
- spatial_memory/core/connection_pool.py +220 -0
- spatial_memory/core/consolidation_strategies.py +402 -0
- spatial_memory/core/database.py +3069 -0
- spatial_memory/core/db_idempotency.py +242 -0
- spatial_memory/core/db_indexes.py +575 -0
- spatial_memory/core/db_migrations.py +584 -0
- spatial_memory/core/db_search.py +509 -0
- spatial_memory/core/db_versioning.py +177 -0
- spatial_memory/core/embeddings.py +557 -0
- spatial_memory/core/errors.py +317 -0
- spatial_memory/core/file_security.py +702 -0
- spatial_memory/core/filesystem.py +178 -0
- spatial_memory/core/health.py +289 -0
- spatial_memory/core/helpers.py +79 -0
- spatial_memory/core/import_security.py +432 -0
- spatial_memory/core/lifecycle_ops.py +1067 -0
- spatial_memory/core/logging.py +194 -0
- spatial_memory/core/metrics.py +192 -0
- spatial_memory/core/models.py +628 -0
- spatial_memory/core/rate_limiter.py +326 -0
- spatial_memory/core/response_types.py +497 -0
- spatial_memory/core/security.py +588 -0
- spatial_memory/core/spatial_ops.py +426 -0
- spatial_memory/core/tracing.py +300 -0
- spatial_memory/core/utils.py +110 -0
- spatial_memory/core/validation.py +403 -0
- spatial_memory/factory.py +407 -0
- spatial_memory/migrations/__init__.py +40 -0
- spatial_memory/ports/__init__.py +11 -0
- spatial_memory/ports/repositories.py +631 -0
- spatial_memory/py.typed +0 -0
- spatial_memory/server.py +1141 -0
- spatial_memory/services/__init__.py +70 -0
- spatial_memory/services/export_import.py +1023 -0
- spatial_memory/services/lifecycle.py +1120 -0
- spatial_memory/services/memory.py +412 -0
- spatial_memory/services/spatial.py +1147 -0
- spatial_memory/services/utility.py +409 -0
- spatial_memory/tools/__init__.py +5 -0
- spatial_memory/tools/definitions.py +695 -0
- spatial_memory/verify.py +140 -0
- spatial_memory_mcp-1.6.1.dist-info/METADATA +499 -0
- spatial_memory_mcp-1.6.1.dist-info/RECORD +54 -0
- spatial_memory_mcp-1.6.1.dist-info/WHEEL +4 -0
- spatial_memory_mcp-1.6.1.dist-info/entry_points.txt +2 -0
- spatial_memory_mcp-1.6.1.dist-info/licenses/LICENSE +21 -0
spatial_memory/core/filesystem.py
@@ -0,0 +1,178 @@
"""Filesystem detection utilities for identifying network filesystems.

This module provides utilities to detect if a path is on a network filesystem
(NFS, SMB/CIFS) where file-based locking may not work reliably.
"""

from __future__ import annotations

import logging
import os
import platform
import subprocess
from enum import Enum
from pathlib import Path

logger = logging.getLogger(__name__)


class FilesystemType(Enum):
    """Types of filesystems that can be detected."""

    LOCAL = "local"
    NFS = "nfs"
    SMB = "smb"
    CIFS = "cifs"
    NETWORK_UNKNOWN = "network_unknown"
    UNKNOWN = "unknown"


def detect_filesystem_type(path: Path) -> FilesystemType:
    """Detect the filesystem type for a given path.

    Args:
        path: Path to check. Will resolve to absolute path.

    Returns:
        FilesystemType indicating the detected filesystem.
        Returns LOCAL for local filesystems, specific types for
        network filesystems, or UNKNOWN if detection fails.
    """
    try:
        resolved = path.resolve()

        if platform.system() == "Windows":
            return _detect_windows(resolved)
        else:
            return _detect_unix(resolved)
    except Exception as e:
        logger.debug(f"Filesystem detection failed for {path}: {e}")
        return FilesystemType.UNKNOWN


def _detect_windows(path: Path) -> FilesystemType:
    """Detect filesystem type on Windows.

    Uses GetDriveTypeW to check if drive is remote.
    """
    try:
        import ctypes

        # Get the drive letter (e.g., "C:\\")
        drive = str(path)[:3] if len(str(path)) >= 3 else str(path)

        # Ensure it ends with backslash for GetDriveTypeW
        if not drive.endswith("\\"):
            drive = drive + "\\"

        # DRIVE_REMOTE = 4
        drive_type = ctypes.windll.kernel32.GetDriveTypeW(drive)

        if drive_type == 4:  # DRIVE_REMOTE
            logger.debug(f"Detected remote drive: {drive}")
            return FilesystemType.NETWORK_UNKNOWN
        else:
            return FilesystemType.LOCAL

    except Exception as e:
        logger.debug(f"Windows filesystem detection failed: {e}")
        return FilesystemType.UNKNOWN


def _detect_unix(path: Path) -> FilesystemType:
    """Detect filesystem type on Unix-like systems.

    Uses 'df -T' or 'mount' to determine filesystem type.
    """
    try:
        # Try using df -T first (more portable)
        result = subprocess.run(
            ["df", "-T", str(path)],
            capture_output=True,
            text=True,
            timeout=5,
        )

        if result.returncode == 0:
            output = result.stdout.lower()
            # Check for common network filesystem types
            if "nfs" in output:
                return FilesystemType.NFS
            if "cifs" in output:
                return FilesystemType.CIFS
            if "smb" in output:
                return FilesystemType.SMB
            if "fuse.sshfs" in output:
                return FilesystemType.NETWORK_UNKNOWN
            # If none of the above, assume local
            return FilesystemType.LOCAL

    except subprocess.TimeoutExpired:
        logger.debug("df command timed out - may indicate network filesystem issue")
        return FilesystemType.NETWORK_UNKNOWN
    except FileNotFoundError:
        # df not available, try alternative
        pass
    except Exception as e:
        logger.debug(f"df command failed: {e}")

    # Fallback: try reading /proc/mounts on Linux
    try:
        if os.path.exists("/proc/mounts"):
            with open("/proc/mounts") as f:
                mounts = f.read().lower()
            path_str = str(path).lower()
            # Find the mount point for this path
            for line in mounts.split("\n"):
                parts = line.split()
                if len(parts) >= 3:
                    mount_point = parts[1]
                    fs_type = parts[2]
                    if path_str.startswith(mount_point):
                        if "nfs" in fs_type:
                            return FilesystemType.NFS
                        if "cifs" in fs_type or "smb" in fs_type:
                            return FilesystemType.SMB
    except Exception as e:
        logger.debug(f"/proc/mounts check failed: {e}")

    return FilesystemType.LOCAL


def is_network_filesystem(path: Path) -> bool:
    """Check if a path is on a network filesystem.

    Args:
        path: Path to check.

    Returns:
        True if the path appears to be on a network filesystem
        (NFS, SMB, CIFS, or unknown network type).
    """
    fs_type = detect_filesystem_type(path)
    return fs_type in (
        FilesystemType.NFS,
        FilesystemType.SMB,
        FilesystemType.CIFS,
        FilesystemType.NETWORK_UNKNOWN,
    )


def get_filesystem_warning_message(fs_type: FilesystemType, path: Path) -> str:
    """Generate a warning message for network filesystem detection.

    Args:
        fs_type: The detected filesystem type.
        path: The path that was checked.

    Returns:
        A warning message string explaining the risks.
    """
    return (
        f"WARNING: Storage path appears to be on a network filesystem ({fs_type.value}). "
        f"Path: {path}\n"
        f"File-based locking does not work reliably on network filesystems. "
        f"Running multiple instances against this storage may cause data corruption. "
        f"To suppress this warning, set SPATIAL_MEMORY_ACKNOWLEDGE_NETWORK_FS_RISK=true "
        f"or use a local filesystem path."
    )
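A minimal caller-side sketch of how these helpers could be wired into startup follows. It is not part of the wheel: the warn_if_network_storage function and the direct environment-variable read are illustrative assumptions (the variable name is taken from the warning text above; the package's real configuration handling lives in spatial_memory/config.py).

# Illustrative startup snippet (not part of the package): gate a warning on
# the filesystem helpers above before opening storage on a networked path.
import logging
import os
from pathlib import Path

from spatial_memory.core.filesystem import (
    detect_filesystem_type,
    get_filesystem_warning_message,
    is_network_filesystem,
)

logger = logging.getLogger(__name__)


def warn_if_network_storage(storage_path: Path) -> None:
    """Log the module's warning unless the operator acknowledged the risk."""
    if not is_network_filesystem(storage_path):
        return
    # Env var name is the one quoted in get_filesystem_warning_message();
    # whether the package reads it exactly like this is an assumption.
    if os.environ.get("SPATIAL_MEMORY_ACKNOWLEDGE_NETWORK_FS_RISK", "").lower() == "true":
        return
    fs_type = detect_filesystem_type(storage_path)
    logger.warning(get_filesystem_warning_message(fs_type, storage_path))


warn_if_network_storage(Path("/data/spatial-memory"))  # any storage directory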
spatial_memory/core/health.py
@@ -0,0 +1,289 @@
"""Health check infrastructure for Spatial Memory MCP Server."""

from __future__ import annotations

import logging
import time
from dataclasses import dataclass
from datetime import datetime, timezone
from enum import Enum
from pathlib import Path
from typing import TYPE_CHECKING, Any

if TYPE_CHECKING:
    from spatial_memory.core.database import Database

logger = logging.getLogger(__name__)


class HealthStatus(Enum):
    """Health status levels."""

    HEALTHY = "healthy"
    DEGRADED = "degraded"
    UNHEALTHY = "unhealthy"


@dataclass
class CheckResult:
    """Result of a single health check."""

    name: str
    status: HealthStatus
    message: str
    latency_ms: float | None = None


@dataclass
class HealthReport:
    """Aggregate health report for system components."""

    status: HealthStatus
    checks: list[CheckResult]
    timestamp: datetime


class HealthChecker:
    """Health checker for system components.

    This class performs health checks on the database, embeddings service,
    and storage system to ensure the server is ready to accept traffic.
    """

    def __init__(
        self,
        database: Database | None = None,
        embeddings: Any = None,
        storage_path: Path | None = None,
    ) -> None:
        """Initialize health checker.

        Args:
            database: Database instance to check (optional).
            embeddings: Embedding service to check (optional).
            storage_path: Storage path to check for writability (optional).
        """
        self.database = database
        self.embeddings = embeddings
        self.storage_path = storage_path

    def check_database(self) -> CheckResult:
        """Check database connectivity and basic operations.

        Returns:
            CheckResult with database health status.
        """
        if self.database is None:
            return CheckResult(
                name="database",
                status=HealthStatus.UNHEALTHY,
                message="Database not configured",
            )

        try:
            start_time = time.perf_counter()

            # Try to get row count to verify database is operational
            count = self.database.count()
            latency = (time.perf_counter() - start_time) * 1000

            # Check if database is degraded based on latency
            if latency > 5000:  # 5 seconds
                return CheckResult(
                    name="database",
                    status=HealthStatus.DEGRADED,
                    message=f"Database operational but slow ({count} rows)",
                    latency_ms=latency,
                )

            return CheckResult(
                name="database",
                status=HealthStatus.HEALTHY,
                message=f"Database operational ({count} rows)",
                latency_ms=latency,
            )

        except Exception as e:
            logger.error(f"Database health check failed: {e}")
            return CheckResult(
                name="database",
                status=HealthStatus.UNHEALTHY,
                message=f"Database error: {str(e)[:100]}",
            )

    def check_embeddings(self) -> CheckResult:
        """Check embedding service is functional.

        Returns:
            CheckResult with embeddings service health status.
        """
        if self.embeddings is None:
            return CheckResult(
                name="embeddings",
                status=HealthStatus.UNHEALTHY,
                message="Embeddings service not configured",
            )

        try:
            start_time = time.perf_counter()

            # Try a test embedding
            test_vector = self.embeddings.embed("health check test")
            latency = (time.perf_counter() - start_time) * 1000

            # Verify vector is correct shape
            if len(test_vector) != self.embeddings.dimensions:
                return CheckResult(
                    name="embeddings",
                    status=HealthStatus.UNHEALTHY,
                    message=(
                        f"Embedding dimension mismatch: "
                        f"got {len(test_vector)}, expected {self.embeddings.dimensions}"
                    ),
                    latency_ms=latency,
                )

            # Check if service is degraded based on latency
            if latency > 10000:  # 10 seconds
                return CheckResult(
                    name="embeddings",
                    status=HealthStatus.DEGRADED,
                    message=f"Embeddings operational but slow ({self.embeddings.model_name})",
                    latency_ms=latency,
                )

            return CheckResult(
                name="embeddings",
                status=HealthStatus.HEALTHY,
                message=(
                    f"Embeddings operational "
                    f"({self.embeddings.model_name}, {self.embeddings.dimensions}d)"
                ),
                latency_ms=latency,
            )

        except Exception as e:
            logger.error(f"Embeddings health check failed: {e}")
            return CheckResult(
                name="embeddings",
                status=HealthStatus.UNHEALTHY,
                message=f"Embeddings error: {str(e)[:100]}",
            )

    def check_storage(self) -> CheckResult:
        """Check storage path is writable.

        Returns:
            CheckResult with storage health status.
        """
        if self.storage_path is None:
            return CheckResult(
                name="storage",
                status=HealthStatus.HEALTHY,
                message="Storage check skipped (no path configured)",
            )

        try:
            # Check if directory exists
            if not self.storage_path.exists():
                return CheckResult(
                    name="storage",
                    status=HealthStatus.UNHEALTHY,
                    message=f"Storage path does not exist: {self.storage_path}",
                )

            # Try to write and delete a test file
            test_file = self.storage_path / ".health_check"
            try:
                test_file.write_text("health check", encoding="utf-8")
                test_file.unlink()
            except Exception as e:
                return CheckResult(
                    name="storage",
                    status=HealthStatus.UNHEALTHY,
                    message=f"Storage path not writable: {e}",
                )

            return CheckResult(
                name="storage",
                status=HealthStatus.HEALTHY,
                message=f"Storage writable ({self.storage_path})",
            )

        except Exception as e:
            logger.error(f"Storage health check failed: {e}")
            return CheckResult(
                name="storage",
                status=HealthStatus.UNHEALTHY,
                message=f"Storage error: {str(e)[:100]}",
            )

    def get_health_report(self) -> HealthReport:
        """Run all checks and return aggregate report.

        Returns:
            HealthReport with all check results.
        """
        checks: list[CheckResult] = []

        # Run all configured checks
        if self.database is not None:
            checks.append(self.check_database())

        if self.embeddings is not None:
            checks.append(self.check_embeddings())

        if self.storage_path is not None:
            checks.append(self.check_storage())

        # Determine overall status
        # If any check is unhealthy, overall is unhealthy
        # If any check is degraded, overall is degraded
        # Otherwise, healthy
        overall_status = HealthStatus.HEALTHY
        for check in checks:
            if check.status == HealthStatus.UNHEALTHY:
                overall_status = HealthStatus.UNHEALTHY
                break
            elif check.status == HealthStatus.DEGRADED:
                overall_status = HealthStatus.DEGRADED

        return HealthReport(
            status=overall_status,
            checks=checks,
            timestamp=datetime.now(timezone.utc),
        )

    def is_ready(self) -> bool:
        """Check if system can accept traffic (readiness probe).

        All critical checks (database and embeddings) must pass.

        Returns:
            True if system is ready, False otherwise.
        """
        report = self.get_health_report()

        # All checks must be at least degraded (not unhealthy)
        for check in report.checks:
            # Storage is optional, skip it
            if check.name == "storage":
                continue

            if check.status == HealthStatus.UNHEALTHY:
                return False

        return True

    def is_alive(self) -> bool:
        """Check if process is alive (liveness probe).

        Basic process health check.

        Returns:
            True if process is alive, False otherwise.
        """
        # Basic check - if we can run this code, we're alive
        # In a more sophisticated implementation, this could check
        # for deadlocks, stuck threads, etc.
        return True
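A sketch of how a readiness endpoint might consume HealthChecker follows. It is not part of the wheel: readiness_payload is a hypothetical helper, and the storage-only checker is simply the easiest configuration to construct without a real Database or embeddings service.

# Illustrative only (not part of the package): turn a HealthReport into a
# JSON-friendly payload, e.g. for a readiness endpoint in a wrapper service.
from pathlib import Path
from typing import Any

from spatial_memory.core.health import HealthChecker


def readiness_payload(checker: HealthChecker) -> tuple[bool, dict[str, Any]]:
    """Return (ready, payload) for an HTTP readiness response."""
    report = checker.get_health_report()
    payload = {
        "status": report.status.value,
        "timestamp": report.timestamp.isoformat(),
        "checks": [
            {
                "name": c.name,
                "status": c.status.value,
                "message": c.message,
                "latency_ms": c.latency_ms,
            }
            for c in report.checks
        ],
    }
    return checker.is_ready(), payload


# Storage-only checker: the simplest configuration that exercises the class.
ready, body = readiness_payload(HealthChecker(storage_path=Path(".")))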
spatial_memory/core/helpers.py
@@ -0,0 +1,79 @@
"""Common utility functions for Spatial Memory MCP."""

from __future__ import annotations

import json
from typing import Any

import numpy as np


def deserialize_metadata(record: dict[str, Any]) -> None:
    """Deserialize metadata JSON string to dict in-place.

    Args:
        record: Database record dict to modify in-place
    """
    if record.get("metadata"):
        if isinstance(record["metadata"], str):
            record["metadata"] = json.loads(record["metadata"])
    else:
        record["metadata"] = {}


def serialize_metadata(metadata: dict[str, Any] | None) -> str:
    """Serialize metadata dict to JSON string.

    Args:
        metadata: Metadata dict or None

    Returns:
        JSON string
    """
    return json.dumps(metadata) if metadata else "{}"


def convert_distance_to_similarity(record: dict[str, Any]) -> None:
    """Convert _distance field to similarity score in-place.

    Similarity is calculated as 1 - distance and clamped to [0, 1].

    Args:
        record: Database record dict to modify in-place
    """
    if "_distance" in record:
        record["similarity"] = max(0.0, min(1.0, 1 - record["_distance"]))
        del record["_distance"]


def deserialize_record(record: dict[str, Any]) -> dict[str, Any]:
    """Fully deserialize a database record.

    Applies:
    - Metadata JSON deserialization
    - Distance to similarity conversion

    Args:
        record: Database record dict

    Returns:
        Deserialized record
    """
    deserialize_metadata(record)
    convert_distance_to_similarity(record)
    return record


def serialize_vector(vector: np.ndarray | list[float]) -> list[float]:
    """Convert vector to list format for storage.

    Args:
        vector: numpy array or list

    Returns:
        List of floats
    """
    if isinstance(vector, np.ndarray):
        result: list[float] = vector.tolist()
        return result
    return vector