empathy-framework 5.1.0-py3-none-any.whl → 5.2.1-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (73)
  1. {empathy_framework-5.1.0.dist-info → empathy_framework-5.2.1.dist-info}/METADATA +52 -3
  2. {empathy_framework-5.1.0.dist-info → empathy_framework-5.2.1.dist-info}/RECORD +71 -30
  3. empathy_os/__init__.py +1 -1
  4. empathy_os/cli_router.py +21 -0
  5. empathy_os/core_modules/__init__.py +15 -0
  6. empathy_os/mcp/__init__.py +10 -0
  7. empathy_os/mcp/server.py +506 -0
  8. empathy_os/memory/control_panel.py +1 -131
  9. empathy_os/memory/control_panel_support.py +145 -0
  10. empathy_os/memory/encryption.py +159 -0
  11. empathy_os/memory/long_term.py +41 -626
  12. empathy_os/memory/long_term_types.py +99 -0
  13. empathy_os/memory/mixins/__init__.py +25 -0
  14. empathy_os/memory/mixins/backend_init_mixin.py +244 -0
  15. empathy_os/memory/mixins/capabilities_mixin.py +199 -0
  16. empathy_os/memory/mixins/handoff_mixin.py +208 -0
  17. empathy_os/memory/mixins/lifecycle_mixin.py +49 -0
  18. empathy_os/memory/mixins/long_term_mixin.py +352 -0
  19. empathy_os/memory/mixins/promotion_mixin.py +109 -0
  20. empathy_os/memory/mixins/short_term_mixin.py +182 -0
  21. empathy_os/memory/short_term.py +7 -0
  22. empathy_os/memory/simple_storage.py +302 -0
  23. empathy_os/memory/storage_backend.py +167 -0
  24. empathy_os/memory/unified.py +21 -1120
  25. empathy_os/meta_workflows/cli_commands/__init__.py +56 -0
  26. empathy_os/meta_workflows/cli_commands/agent_commands.py +321 -0
  27. empathy_os/meta_workflows/cli_commands/analytics_commands.py +442 -0
  28. empathy_os/meta_workflows/cli_commands/config_commands.py +232 -0
  29. empathy_os/meta_workflows/cli_commands/memory_commands.py +182 -0
  30. empathy_os/meta_workflows/cli_commands/template_commands.py +354 -0
  31. empathy_os/meta_workflows/cli_commands/workflow_commands.py +382 -0
  32. empathy_os/meta_workflows/cli_meta_workflows.py +52 -1802
  33. empathy_os/meta_workflows/intent_detector.py +71 -0
  34. empathy_os/models/telemetry/__init__.py +71 -0
  35. empathy_os/models/telemetry/analytics.py +594 -0
  36. empathy_os/models/telemetry/backend.py +196 -0
  37. empathy_os/models/telemetry/data_models.py +431 -0
  38. empathy_os/models/telemetry/storage.py +489 -0
  39. empathy_os/orchestration/__init__.py +35 -0
  40. empathy_os/orchestration/execution_strategies.py +481 -0
  41. empathy_os/orchestration/meta_orchestrator.py +488 -1
  42. empathy_os/routing/workflow_registry.py +36 -0
  43. empathy_os/telemetry/cli.py +19 -724
  44. empathy_os/telemetry/commands/__init__.py +14 -0
  45. empathy_os/telemetry/commands/dashboard_commands.py +696 -0
  46. empathy_os/tools.py +183 -0
  47. empathy_os/workflows/__init__.py +5 -0
  48. empathy_os/workflows/autonomous_test_gen.py +860 -161
  49. empathy_os/workflows/base.py +6 -2
  50. empathy_os/workflows/code_review.py +4 -1
  51. empathy_os/workflows/document_gen/__init__.py +25 -0
  52. empathy_os/workflows/document_gen/config.py +30 -0
  53. empathy_os/workflows/document_gen/report_formatter.py +162 -0
  54. empathy_os/workflows/document_gen/workflow.py +1426 -0
  55. empathy_os/workflows/document_gen.py +22 -1598
  56. empathy_os/workflows/security_audit.py +2 -2
  57. empathy_os/workflows/security_audit_phase3.py +7 -4
  58. empathy_os/workflows/seo_optimization.py +633 -0
  59. empathy_os/workflows/test_gen/__init__.py +52 -0
  60. empathy_os/workflows/test_gen/ast_analyzer.py +249 -0
  61. empathy_os/workflows/test_gen/config.py +88 -0
  62. empathy_os/workflows/test_gen/data_models.py +38 -0
  63. empathy_os/workflows/test_gen/report_formatter.py +289 -0
  64. empathy_os/workflows/test_gen/test_templates.py +381 -0
  65. empathy_os/workflows/test_gen/workflow.py +655 -0
  66. empathy_os/workflows/test_gen.py +42 -1905
  67. empathy_os/memory/types 2.py +0 -441
  68. empathy_os/models/telemetry.py +0 -1660
  69. {empathy_framework-5.1.0.dist-info → empathy_framework-5.2.1.dist-info}/WHEEL +0 -0
  70. {empathy_framework-5.1.0.dist-info → empathy_framework-5.2.1.dist-info}/entry_points.txt +0 -0
  71. {empathy_framework-5.1.0.dist-info → empathy_framework-5.2.1.dist-info}/licenses/LICENSE +0 -0
  72. {empathy_framework-5.1.0.dist-info → empathy_framework-5.2.1.dist-info}/licenses/LICENSE_CHANGE_ANNOUNCEMENT.md +0 -0
  73. {empathy_framework-5.1.0.dist-info → empathy_framework-5.2.1.dist-info}/top_level.txt +0 -0
@@ -27,381 +27,40 @@ Copyright 2025 Smart AI Memory, LLC
  Licensed under Fair Source 0.9
  """

- import base64
- import binascii
  import concurrent.futures
  import hashlib
- import json
  import os
- from dataclasses import dataclass, field
  from datetime import datetime, timedelta
- from enum import Enum
- from pathlib import Path
  from typing import Any

  import structlog

- from empathy_os.config import _validate_file_path
-
+ from .encryption import HAS_ENCRYPTION, EncryptionManager
+
+ # Import extracted modules for backward compatibility
+ from .long_term_types import (
+     DEFAULT_CLASSIFICATION_RULES,
+     Classification,
+     ClassificationRules,
+     PatternMetadata,
+     PermissionError,
+     SecurePattern,
+     SecurityError,
+ )
  from .security.audit_logger import AuditEvent, AuditLogger
  from .security.pii_scrubber import PIIScrubber
  from .security.secrets_detector import SecretsDetector
+ from .simple_storage import LongTermMemory
+ from .storage_backend import MemDocsStorage

  logger = structlog.get_logger(__name__)

- # Check for cryptography library
- try:
-     from cryptography.exceptions import InvalidTag
-     from cryptography.hazmat.primitives.ciphers.aead import AESGCM
-
-     HAS_ENCRYPTION = True
- except ImportError:
-     HAS_ENCRYPTION = False
-     logger.warning("cryptography library not available - encryption disabled")
-
-
- class Classification(Enum):
-     """Three-tier classification system for MemDocs patterns"""
-
-     PUBLIC = "PUBLIC"  # Shareable across organization, anonymized
-     INTERNAL = "INTERNAL"  # Team/project only, no PII or secrets
-     SENSITIVE = "SENSITIVE"  # Encrypted at rest, access-controlled (HIPAA, finance)
-
-
- @dataclass
- class ClassificationRules:
-     """Security rules for each classification level"""
-
-     classification: Classification
-     encryption_required: bool
-     retention_days: int
-     access_level: str  # "all_users", "project_team", "explicit_permission"
-     audit_all_access: bool = False
-
-
- # Default classification rules based on enterprise security policy
- DEFAULT_CLASSIFICATION_RULES: dict[Classification, ClassificationRules] = {
-     Classification.PUBLIC: ClassificationRules(
-         classification=Classification.PUBLIC,
-         encryption_required=False,
-         retention_days=365,
-         access_level="all_users",
-         audit_all_access=False,
-     ),
-     Classification.INTERNAL: ClassificationRules(
-         classification=Classification.INTERNAL,
-         encryption_required=False,
-         retention_days=180,
-         access_level="project_team",
-         audit_all_access=False,
-     ),
-     Classification.SENSITIVE: ClassificationRules(
-         classification=Classification.SENSITIVE,
-         encryption_required=True,
-         retention_days=90,
-         access_level="explicit_permission",
-         audit_all_access=True,
-     ),
- }
-
-
- @dataclass
- class PatternMetadata:
-     """Metadata for stored MemDocs patterns"""
-
-     pattern_id: str
-     created_by: str
-     created_at: str
-     classification: str
-     retention_days: int
-     encrypted: bool
-     pattern_type: str
-     sanitization_applied: bool
-     pii_removed: int
-     secrets_detected: int
-     access_control: dict[str, Any] = field(default_factory=dict)
-     custom_metadata: dict[str, Any] = field(default_factory=dict)
-
-
- @dataclass
- class SecurePattern:
-     """Represents a securely stored pattern"""
-
-     pattern_id: str
-     content: str
-     metadata: PatternMetadata
-
-
- class SecurityError(Exception):
-     """Raised when security policy is violated"""
-
-
- class PermissionError(Exception):
-     """Raised when access is denied"""
-
-
- class EncryptionManager:
-     """Manages encryption/decryption for SENSITIVE patterns.
-
-     Uses AES-256-GCM (Galois/Counter Mode) for authenticated encryption.
-     Keys are derived from a master key using HKDF.
-     """
-
-     def __init__(self, master_key: bytes | None = None):
-         """Initialize encryption manager.
-
-         Args:
-             master_key: 32-byte master key (or None to generate/load)
-
-         """
-         if not HAS_ENCRYPTION:
-             logger.warning("Encryption not available - install cryptography library")
-             self.enabled = False
-             return
-
-         self.enabled = True
-         self.master_key = master_key or self._load_or_generate_key()
-
-     def _load_or_generate_key(self) -> bytes:
-         """Load master key from environment or generate new one.
-
-         Production: Set EMPATHY_MASTER_KEY environment variable
-         Development: Generates ephemeral key (warning logged)
-         """
-         # Check environment variable first
-         if env_key := os.getenv("EMPATHY_MASTER_KEY"):
-             try:
-                 return base64.b64decode(env_key)
-             except (binascii.Error, ValueError) as e:
-                 logger.error("invalid_master_key_in_env", error=str(e))
-                 raise ValueError("Invalid EMPATHY_MASTER_KEY format") from e
-
-         # Check key file
-         key_file = Path.home() / ".empathy" / "master.key"
-         if key_file.exists():
-             try:
-                 return key_file.read_bytes()
-             except (OSError, PermissionError) as e:
-                 logger.error("failed_to_load_key_file", error=str(e))
-
-         # Generate ephemeral key (NOT for production)
-         logger.warning(
-             "no_master_key_found",
-             message="Generating ephemeral encryption key - set EMPATHY_MASTER_KEY for production",
-         )
-         return AESGCM.generate_key(bit_length=256)
-
-     def encrypt(self, plaintext: str) -> str:
-         """Encrypt plaintext using AES-256-GCM.
-
-         Args:
-             plaintext: Content to encrypt
-
-         Returns:
-             Base64-encoded ciphertext with format: nonce||ciphertext||tag
-
-         Raises:
-             SecurityError: If encryption fails
-
-         """
-         if not self.enabled:
-             raise SecurityError("Encryption not available - install cryptography library")
-
-         try:
-             # Generate random 96-bit nonce (12 bytes)
-             nonce = os.urandom(12)
-
-             # Create AESGCM cipher
-             aesgcm = AESGCM(self.master_key)
-
-             # Encrypt and authenticate
-             ciphertext = aesgcm.encrypt(nonce, plaintext.encode("utf-8"), None)
-
-             # Combine nonce + ciphertext for storage
-             encrypted_data = nonce + ciphertext
-
-             # Return base64-encoded
-             return base64.b64encode(encrypted_data).decode("utf-8")
-
-         except (ValueError, TypeError, UnicodeEncodeError) as e:
-             logger.error("encryption_failed", error=str(e))
-             raise SecurityError(f"Encryption failed: {e}") from e
-
-     def decrypt(self, ciphertext_b64: str) -> str:
-         """Decrypt ciphertext using AES-256-GCM.
-
-         Args:
-             ciphertext_b64: Base64-encoded encrypted data
-
-         Returns:
-             Decrypted plaintext
-
-         Raises:
-             SecurityError: If decryption fails (invalid key, corrupted data, etc.)
-
-         """
-         if not self.enabled:
-             raise SecurityError("Encryption not available - install cryptography library")
-
-         try:
-             # Decode from base64
-             encrypted_data = base64.b64decode(ciphertext_b64)
-
-             # Extract nonce (first 12 bytes) and ciphertext (rest)
-             nonce = encrypted_data[:12]
-             ciphertext = encrypted_data[12:]
-
-             # Create AESGCM cipher
-             aesgcm = AESGCM(self.master_key)
-
-             # Decrypt and verify
-             plaintext_bytes = aesgcm.decrypt(nonce, ciphertext, None)
-
-             return plaintext_bytes.decode("utf-8")
-
-         except (ValueError, TypeError, UnicodeDecodeError, binascii.Error, InvalidTag) as e:
-             logger.error("decryption_failed", error=str(e))
-             raise SecurityError(f"Decryption failed: {e}") from e
-
-
- class MemDocsStorage:
-     """Mock/Simple MemDocs storage backend.
-
-     In production, this would integrate with the actual MemDocs library.
-     For now, provides a simple file-based storage for testing.
-     """
-
-     def __init__(self, storage_dir: str = "./memdocs_storage"):
-         """Initialize storage backend.
-
-         Args:
-             storage_dir: Directory for pattern storage
-
-         """
-         self.storage_dir = Path(storage_dir)
-         self.storage_dir.mkdir(parents=True, exist_ok=True)
-         logger.info("memdocs_storage_initialized", storage_dir=str(self.storage_dir))
-
-     def store(self, pattern_id: str, content: str, metadata: dict[str, Any]) -> bool:
-         """Store a pattern.
-
-         Args:
-             pattern_id: Unique pattern identifier
-             content: Pattern content (may be encrypted)
-             metadata: Pattern metadata
-
-         Returns:
-             True if successful
-
-         Raises:
-             IOError: If storage fails
-
-         """
-         try:
-             pattern_file = self.storage_dir / f"{pattern_id}.json"
-
-             # Ensure parent directory exists
-             pattern_file.parent.mkdir(parents=True, exist_ok=True)
-
-             pattern_data = {"pattern_id": pattern_id, "content": content, "metadata": metadata}
-
-             validated_pattern_file = _validate_file_path(str(pattern_file))
-             with open(validated_pattern_file, "w", encoding="utf-8") as f:
-                 json.dump(pattern_data, f, indent=2)
-
-             logger.debug("pattern_stored", pattern_id=pattern_id)
-             return True
-
-         except (OSError, PermissionError, json.JSONDecodeError) as e:
-             logger.error("pattern_storage_failed", pattern_id=pattern_id, error=str(e))
-             raise
-
-     def retrieve(self, pattern_id: str) -> dict[str, Any] | None:
-         """Retrieve a pattern.
-
-         Args:
-             pattern_id: Unique pattern identifier
-
-         Returns:
-             Pattern data dictionary or None if not found
-
-         """
-         try:
-             pattern_file = self.storage_dir / f"{pattern_id}.json"
-
-             if not pattern_file.exists():
-                 logger.warning("pattern_not_found", pattern_id=pattern_id)
-                 return None
-
-             with open(pattern_file, encoding="utf-8") as f:
-                 pattern_data: dict[str, Any] = json.load(f)
-
-             logger.debug("pattern_retrieved", pattern_id=pattern_id)
-             return pattern_data
-
-         except (OSError, PermissionError, json.JSONDecodeError) as e:
-             logger.error("pattern_retrieval_failed", pattern_id=pattern_id, error=str(e))
-             return None
+ # HAS_ENCRYPTION is now imported from encryption module

-     def delete(self, pattern_id: str) -> bool:
-         """Delete a pattern.
-
-         Args:
-             pattern_id: Unique pattern identifier
-
-         Returns:
-             True if deleted, False if not found
-
-         """
-         try:
-             pattern_file = self.storage_dir / f"{pattern_id}.json"
-
-             if not pattern_file.exists():
-                 return False
-
-             pattern_file.unlink()
-             logger.info("pattern_deleted", pattern_id=pattern_id)
-             return True
-
-         except (OSError, PermissionError) as e:
-             logger.error("pattern_deletion_failed", pattern_id=pattern_id, error=str(e))
-             return False
-
-     def list_patterns(
-         self,
-         classification: str | None = None,
-         created_by: str | None = None,
-     ) -> list[str]:
-         """List pattern IDs matching criteria.
-
-         Args:
-             classification: Filter by classification
-             created_by: Filter by creator
-
-         Returns:
-             List of pattern IDs
-
-         """
-         pattern_ids = []
-
-         for pattern_file in self.storage_dir.glob("*.json"):
-             try:
-                 with open(pattern_file, encoding="utf-8") as f:
-                     data = json.load(f)
-                     metadata = data.get("metadata", {})
-
-                 # Apply filters
-                 if classification and metadata.get("classification") != classification:
-                     continue
-                 if created_by and metadata.get("created_by") != created_by:
-                     continue
-
-                 pattern_ids.append(data.get("pattern_id"))
-
-             except Exception:
-                 continue
-
-         return pattern_ids
+ # NOTE: Classification, ClassificationRules, PatternMetadata, SecurePattern,
+ # EncryptionManager, MemDocsStorage, and LongTermMemory have been extracted to
+ # separate modules for better modularity. They are imported above and re-exported
+ # below for backward compatibility.


  class SecureMemDocsIntegration:
@@ -1227,272 +886,28 @@ class SecureMemDocsIntegration:
  # ============================================================================


- class LongTermMemory:
-     """Simplified long-term persistent storage interface.
-
-     Provides basic CRUD operations for long-term memory storage without
-     the full security pipeline of SecureMemDocsIntegration. Suitable for
-     simple persistent storage needs where PII scrubbing and encryption
-     are not required.
-
-     Features:
-     - JSON file-based storage
-     - Classification support (PUBLIC/INTERNAL/SENSITIVE)
-     - Simple key-value interface
-     - List keys by classification
-
-     Example:
-         >>> memory = LongTermMemory(storage_path="./data")
-         >>> memory.store("config", {"setting": "value"}, classification="INTERNAL")
-         >>> data = memory.retrieve("config")
-         >>> keys = memory.list_keys(classification="INTERNAL")
-
-     Note:
-         For enterprise features (PII scrubbing, encryption, audit logging),
-         use SecureMemDocsIntegration instead.
-     """
-
-     def __init__(self, storage_path: str = "./long_term_storage"):
-         """Initialize long-term memory storage.
-
-         Args:
-             storage_path: Directory path for JSON storage
-
-         """
-         self.storage_path = Path(storage_path)
-         self.storage_path.mkdir(parents=True, exist_ok=True)
-         logger.info("long_term_memory_initialized", storage_path=str(self.storage_path))
-
-     def store(
-         self,
-         key: str,
-         data: Any,
-         classification: str | Classification | None = None,
-     ) -> bool:
-         """Store data in long-term memory.
-
-         Args:
-             key: Storage key
-             data: Data to store (must be JSON-serializable)
-             classification: Data classification (PUBLIC/INTERNAL/SENSITIVE)
-
-         Returns:
-             True if stored successfully, False otherwise
-
-         Raises:
-             ValueError: If key is empty or data is not JSON-serializable
-             TypeError: If data cannot be serialized to JSON
-
-         Example:
-             >>> memory = LongTermMemory()
-             >>> memory.store("user_prefs", {"theme": "dark"}, "INTERNAL")
-             True
-
-         """
-         if not key or not key.strip():
-             raise ValueError("key cannot be empty")
-
-         # Validate key for path traversal attacks
-         if ".." in key or key.startswith("/") or "\x00" in key:
-             logger.error("path_traversal_attempt", key=key)
-             return False
-
-         try:
-             # Convert classification to string
-             classification_str = "INTERNAL"  # Default
-             if classification is not None:
-                 if isinstance(classification, Classification):
-                     classification_str = classification.value
-                 elif isinstance(classification, str):
-                     # Validate classification string
-                     try:
-                         Classification[classification.upper()]
-                         classification_str = classification.upper()
-                     except KeyError:
-                         logger.warning(
-                             "invalid_classification",
-                             classification=classification,
-                             using_default="INTERNAL",
-                         )
-
-             # Create storage record
-             record = {
-                 "key": key,
-                 "data": data,
-                 "classification": classification_str,
-                 "created_at": datetime.utcnow().isoformat() + "Z",
-                 "updated_at": datetime.utcnow().isoformat() + "Z",
-             }
-
-             # Store to JSON file
-             file_path = self.storage_path / f"{key}.json"
-             validated_file_path = _validate_file_path(str(file_path))
-             with validated_file_path.open("w", encoding="utf-8") as f:
-                 json.dump(record, f, indent=2)

-             logger.debug("data_stored", key=key, classification=classification_str)
-             return True
-
-         except (TypeError, ValueError) as e:
-             logger.error("store_failed", key=key, error=str(e))
-             raise
-         except (OSError, PermissionError) as e:
-             logger.error("storage_io_error", key=key, error=str(e))
-             return False
-
-     def retrieve(self, key: str) -> Any | None:
-         """Retrieve data from long-term memory.
-
-         Args:
-             key: Storage key
-
-         Returns:
-             Stored data or None if not found
-
-         Raises:
-             ValueError: If key is empty
-
-         Example:
-             >>> memory = LongTermMemory()
-             >>> memory.store("config", {"value": 42})
-             >>> data = memory.retrieve("config")
-             >>> print(data["value"])
-             42

-         """
-         if not key or not key.strip():
-             raise ValueError("key cannot be empty")
-
-         try:
-             file_path = self.storage_path / f"{key}.json"
-
-             if not file_path.exists():
-                 logger.debug("key_not_found", key=key)
-                 return None
-
-             with file_path.open(encoding="utf-8") as f:
-                 record = json.load(f)
-
-             logger.debug("data_retrieved", key=key)
-             return record.get("data")
-
-         except (OSError, PermissionError, json.JSONDecodeError) as e:
-             logger.error("retrieve_failed", key=key, error=str(e))
-             return None
-
-     def delete(self, key: str) -> bool:
-         """Delete data from long-term memory.
-
-         Args:
-             key: Storage key
-
-         Returns:
-             True if deleted, False if not found or error
-
-         Raises:
-             ValueError: If key is empty
-
-         Example:
-             >>> memory = LongTermMemory()
-             >>> memory.store("temp", {"data": "value"})
-             >>> memory.delete("temp")
-             True
-
-         """
-         if not key or not key.strip():
-             raise ValueError("key cannot be empty")
-
-         try:
-             file_path = self.storage_path / f"{key}.json"
-
-             if not file_path.exists():
-                 logger.debug("key_not_found_for_deletion", key=key)
-                 return False
-
-             file_path.unlink()
-             logger.info("data_deleted", key=key)
-             return True
-
-         except (OSError, PermissionError) as e:
-             logger.error("delete_failed", key=key, error=str(e))
-             return False
-
-     def list_keys(self, classification: str | Classification | None = None) -> list[str]:
-         """List all keys in long-term memory, optionally filtered by classification.
-
-         Args:
-             classification: Filter by classification (PUBLIC/INTERNAL/SENSITIVE)
-
-         Returns:
-             List of storage keys
-
-         Example:
-             >>> memory = LongTermMemory()
-             >>> memory.store("public_data", {"x": 1}, "PUBLIC")
-             >>> memory.store("internal_data", {"y": 2}, "INTERNAL")
-             >>> keys = memory.list_keys(classification="PUBLIC")
-             >>> print(keys)
-             ['public_data']
-
-         """
-         keys = []
-
-         # Convert classification to string if needed
-         filter_classification = None
-         if classification is not None:
-             if isinstance(classification, Classification):
-                 filter_classification = classification.value
-             elif isinstance(classification, str):
-                 try:
-                     Classification[classification.upper()]
-                     filter_classification = classification.upper()
-                 except KeyError:
-                     logger.warning("invalid_classification_filter", classification=classification)
-                     return []
-
-         try:
-             for file_path in self.storage_path.glob("*.json"):
-                 try:
-                     with file_path.open(encoding="utf-8") as f:
-                         record = json.load(f)
-
-                     # Apply classification filter if specified
-                     if filter_classification is not None:
-                         if record.get("classification") != filter_classification:
-                             continue
-
-                     keys.append(record.get("key", file_path.stem))
-
-                 except (OSError, json.JSONDecodeError):
-                     continue
-
-         except (OSError, PermissionError) as e:
-             logger.error("list_keys_failed", error=str(e))
-
-         return keys
-
-     def clear(self) -> int:
-         """Clear all data from long-term memory.
-
-         Returns:
-             Number of keys deleted
-
-         Warning:
-             This operation cannot be undone!
-
-         """
-         count = 0
-         try:
-             for file_path in self.storage_path.glob("*.json"):
-                 try:
-                     file_path.unlink()
-                     count += 1
-                 except (OSError, PermissionError):
-                     continue
-
-             logger.warning("long_term_memory_cleared", count=count)
-             return count
+ # ============================================================================
+ # Backward Compatibility Exports
+ # ============================================================================

-         except (OSError, PermissionError) as e:
-             logger.error("clear_failed", error=str(e))
-             return count
+ __all__ = [
+     # Types (from long_term_types.py)
+     "Classification",
+     "ClassificationRules",
+     "DEFAULT_CLASSIFICATION_RULES",
+     "PatternMetadata",
+     "SecurePattern",
+     "SecurityError",
+     "PermissionError",
+     # Encryption (from encryption.py)
+     "EncryptionManager",
+     "HAS_ENCRYPTION",
+     # Storage (from storage_backend.py)
+     "MemDocsStorage",
+     # Simple storage (from simple_storage.py)
+     "LongTermMemory",
+     # Main integration (defined in this file)
+     "SecureMemDocsIntegration",
+ ]
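
The practical effect of this refactor for downstream code: the old names continue to import from empathy_os.memory.long_term, while the implementations now live in the extracted modules named in the comments above. A minimal sketch, illustrative only and assuming the re-exports are straight aliases of the extracted objects:

    # Pre-5.2.1 import path, still served by the backward-compatibility re-exports
    from empathy_os.memory.long_term import Classification, LongTermMemory

    # New module layout introduced in 5.2.1
    from empathy_os.memory.long_term_types import Classification as TypesClassification
    from empathy_os.memory.simple_storage import LongTermMemory as SimpleLongTermMemory

    # If the re-exports are plain aliases, both paths resolve to the same objects
    assert Classification is TypesClassification
    assert LongTermMemory is SimpleLongTermMemory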
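For context on the encryption code moved into empathy_os/memory/encryption.py: the removed EncryptionManager wraps standard AES-256-GCM from the cryptography package, prepending a random 96-bit nonce to the ciphertext and base64-encoding the result. A self-contained sketch of that same layout using only the cryptography primitives (not the package's own API):

    import base64
    import os

    from cryptography.hazmat.primitives.ciphers.aead import AESGCM

    # A fresh 32-byte key; base64-encoding a key like this is the format
    # _load_or_generate_key expects in EMPATHY_MASTER_KEY.
    key = AESGCM.generate_key(bit_length=256)

    def encrypt(plaintext: str) -> str:
        nonce = os.urandom(12)  # 96-bit nonce, as in EncryptionManager.encrypt
        ciphertext = AESGCM(key).encrypt(nonce, plaintext.encode("utf-8"), None)
        return base64.b64encode(nonce + ciphertext).decode("utf-8")

    def decrypt(token: str) -> str:
        raw = base64.b64decode(token)
        nonce, ciphertext = raw[:12], raw[12:]  # nonce || ciphertext+tag
        return AESGCM(key).decrypt(nonce, ciphertext, None).decode("utf-8")

    assert decrypt(encrypt("sensitive pattern")) == "sensitive pattern"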