empathy-framework 5.1.1__py3-none-any.whl → 5.3.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (106)
  1. {empathy_framework-5.1.1.dist-info → empathy_framework-5.3.0.dist-info}/METADATA +79 -6
  2. {empathy_framework-5.1.1.dist-info → empathy_framework-5.3.0.dist-info}/RECORD +83 -64
  3. empathy_os/__init__.py +1 -1
  4. empathy_os/cache/hybrid.py +5 -1
  5. empathy_os/cli/commands/batch.py +8 -0
  6. empathy_os/cli/commands/profiling.py +4 -0
  7. empathy_os/cli/commands/workflow.py +8 -4
  8. empathy_os/cli_router.py +9 -0
  9. empathy_os/config.py +15 -2
  10. empathy_os/core_modules/__init__.py +15 -0
  11. empathy_os/dashboard/simple_server.py +62 -30
  12. empathy_os/mcp/__init__.py +10 -0
  13. empathy_os/mcp/server.py +506 -0
  14. empathy_os/memory/control_panel.py +1 -131
  15. empathy_os/memory/control_panel_support.py +145 -0
  16. empathy_os/memory/encryption.py +159 -0
  17. empathy_os/memory/long_term.py +46 -631
  18. empathy_os/memory/long_term_types.py +99 -0
  19. empathy_os/memory/mixins/__init__.py +25 -0
  20. empathy_os/memory/mixins/backend_init_mixin.py +249 -0
  21. empathy_os/memory/mixins/capabilities_mixin.py +208 -0
  22. empathy_os/memory/mixins/handoff_mixin.py +208 -0
  23. empathy_os/memory/mixins/lifecycle_mixin.py +49 -0
  24. empathy_os/memory/mixins/long_term_mixin.py +352 -0
  25. empathy_os/memory/mixins/promotion_mixin.py +109 -0
  26. empathy_os/memory/mixins/short_term_mixin.py +182 -0
  27. empathy_os/memory/short_term.py +61 -12
  28. empathy_os/memory/simple_storage.py +302 -0
  29. empathy_os/memory/storage_backend.py +167 -0
  30. empathy_os/memory/types.py +8 -3
  31. empathy_os/memory/unified.py +21 -1120
  32. empathy_os/meta_workflows/cli_commands/__init__.py +56 -0
  33. empathy_os/meta_workflows/cli_commands/agent_commands.py +321 -0
  34. empathy_os/meta_workflows/cli_commands/analytics_commands.py +442 -0
  35. empathy_os/meta_workflows/cli_commands/config_commands.py +232 -0
  36. empathy_os/meta_workflows/cli_commands/memory_commands.py +182 -0
  37. empathy_os/meta_workflows/cli_commands/template_commands.py +354 -0
  38. empathy_os/meta_workflows/cli_commands/workflow_commands.py +382 -0
  39. empathy_os/meta_workflows/cli_meta_workflows.py +52 -1802
  40. empathy_os/models/telemetry/__init__.py +71 -0
  41. empathy_os/models/telemetry/analytics.py +594 -0
  42. empathy_os/models/telemetry/backend.py +196 -0
  43. empathy_os/models/telemetry/data_models.py +431 -0
  44. empathy_os/models/telemetry/storage.py +489 -0
  45. empathy_os/orchestration/__init__.py +35 -0
  46. empathy_os/orchestration/execution_strategies.py +481 -0
  47. empathy_os/orchestration/meta_orchestrator.py +488 -1
  48. empathy_os/routing/workflow_registry.py +36 -0
  49. empathy_os/telemetry/agent_coordination.py +2 -3
  50. empathy_os/telemetry/agent_tracking.py +26 -7
  51. empathy_os/telemetry/approval_gates.py +18 -24
  52. empathy_os/telemetry/cli.py +19 -724
  53. empathy_os/telemetry/commands/__init__.py +14 -0
  54. empathy_os/telemetry/commands/dashboard_commands.py +696 -0
  55. empathy_os/telemetry/event_streaming.py +7 -3
  56. empathy_os/telemetry/feedback_loop.py +28 -15
  57. empathy_os/tools.py +183 -0
  58. empathy_os/workflows/__init__.py +5 -0
  59. empathy_os/workflows/autonomous_test_gen.py +860 -161
  60. empathy_os/workflows/base.py +6 -2
  61. empathy_os/workflows/code_review.py +4 -1
  62. empathy_os/workflows/document_gen/__init__.py +25 -0
  63. empathy_os/workflows/document_gen/config.py +30 -0
  64. empathy_os/workflows/document_gen/report_formatter.py +162 -0
  65. empathy_os/workflows/{document_gen.py → document_gen/workflow.py} +5 -184
  66. empathy_os/workflows/output.py +4 -1
  67. empathy_os/workflows/progress.py +8 -2
  68. empathy_os/workflows/security_audit.py +2 -2
  69. empathy_os/workflows/security_audit_phase3.py +7 -4
  70. empathy_os/workflows/seo_optimization.py +633 -0
  71. empathy_os/workflows/test_gen/__init__.py +52 -0
  72. empathy_os/workflows/test_gen/ast_analyzer.py +249 -0
  73. empathy_os/workflows/test_gen/config.py +88 -0
  74. empathy_os/workflows/test_gen/data_models.py +38 -0
  75. empathy_os/workflows/test_gen/report_formatter.py +289 -0
  76. empathy_os/workflows/test_gen/test_templates.py +381 -0
  77. empathy_os/workflows/test_gen/workflow.py +655 -0
  78. empathy_os/workflows/test_gen.py +42 -1905
  79. empathy_os/cli/parsers/cache 2.py +0 -65
  80. empathy_os/cli_router 2.py +0 -416
  81. empathy_os/dashboard/app 2.py +0 -512
  82. empathy_os/dashboard/simple_server 2.py +0 -403
  83. empathy_os/dashboard/standalone_server 2.py +0 -536
  84. empathy_os/memory/types 2.py +0 -441
  85. empathy_os/models/adaptive_routing 2.py +0 -437
  86. empathy_os/models/telemetry.py +0 -1660
  87. empathy_os/project_index/scanner_parallel 2.py +0 -291
  88. empathy_os/telemetry/agent_coordination 2.py +0 -478
  89. empathy_os/telemetry/agent_tracking 2.py +0 -350
  90. empathy_os/telemetry/approval_gates 2.py +0 -563
  91. empathy_os/telemetry/event_streaming 2.py +0 -405
  92. empathy_os/telemetry/feedback_loop 2.py +0 -557
  93. empathy_os/vscode_bridge 2.py +0 -173
  94. empathy_os/workflows/progressive/__init__ 2.py +0 -92
  95. empathy_os/workflows/progressive/cli 2.py +0 -242
  96. empathy_os/workflows/progressive/core 2.py +0 -488
  97. empathy_os/workflows/progressive/orchestrator 2.py +0 -701
  98. empathy_os/workflows/progressive/reports 2.py +0 -528
  99. empathy_os/workflows/progressive/telemetry 2.py +0 -280
  100. empathy_os/workflows/progressive/test_gen 2.py +0 -514
  101. empathy_os/workflows/progressive/workflow 2.py +0 -628
  102. {empathy_framework-5.1.1.dist-info → empathy_framework-5.3.0.dist-info}/WHEEL +0 -0
  103. {empathy_framework-5.1.1.dist-info → empathy_framework-5.3.0.dist-info}/entry_points.txt +0 -0
  104. {empathy_framework-5.1.1.dist-info → empathy_framework-5.3.0.dist-info}/licenses/LICENSE +0 -0
  105. {empathy_framework-5.1.1.dist-info → empathy_framework-5.3.0.dist-info}/licenses/LICENSE_CHANGE_ANNOUNCEMENT.md +0 -0
  106. {empathy_framework-5.1.1.dist-info → empathy_framework-5.3.0.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,302 @@
1
+ """Simplified long-term storage without security pipeline
2
+
3
+ Provides basic CRUD operations for long-term persistent storage without
4
+ the full security pipeline of SecureMemDocsIntegration. Suitable for
5
+ simple storage needs where PII scrubbing and encryption are not required.
6
+
7
+ Extracted from long_term.py for better modularity and testability.
8
+
9
+ Features:
10
+ - JSON file-based storage
11
+ - Classification support (PUBLIC/INTERNAL/SENSITIVE)
12
+ - Simple key-value interface
13
+ - List keys by classification
14
+ - Path validation for security
15
+
16
+ Copyright 2025 Smart AI Memory, LLC
17
+ Licensed under Fair Source 0.9
18
+ """
19
+
20
+ import json
21
+ from datetime import datetime
22
+ from pathlib import Path
23
+ from typing import Any
24
+
25
+ import structlog
26
+
27
+ from empathy_os.config import _validate_file_path
28
+
29
+ from .long_term_types import Classification
30
+
31
+ logger = structlog.get_logger(__name__)
32
+
33
+
34
class LongTermMemory:
    """Simplified long-term persistent storage interface.

    Provides basic CRUD operations for long-term memory storage without
    the full security pipeline of SecureMemDocsIntegration. Suitable for
    simple persistent storage needs where PII scrubbing and encryption
    are not required.

    Features:
    - JSON file-based storage (one ``<key>.json`` file per entry)
    - Classification support (PUBLIC/INTERNAL/SENSITIVE)
    - Simple key-value interface
    - List keys by classification
    - Path-traversal validation on keys (applied to all operations)

    Example:
        >>> memory = LongTermMemory(storage_path="./data")
        >>> memory.store("config", {"setting": "value"}, classification="INTERNAL")
        >>> data = memory.retrieve("config")
        >>> keys = memory.list_keys(classification="INTERNAL")

    Note:
        For enterprise features (PII scrubbing, encryption, audit logging),
        use SecureMemDocsIntegration instead.
    """

    def __init__(self, storage_path: str = "./long_term_storage"):
        """Initialize long-term memory storage.

        Args:
            storage_path: Directory path for JSON storage (created if missing)

        """
        self.storage_path = Path(storage_path)
        self.storage_path.mkdir(parents=True, exist_ok=True)
        logger.info("long_term_memory_initialized", storage_path=str(self.storage_path))

    @staticmethod
    def _is_safe_key(key: str) -> bool:
        """Return True when *key* cannot escape the storage directory."""
        return ".." not in key and not key.startswith("/") and "\x00" not in key

    @staticmethod
    def _classification_to_str(classification: str | Classification) -> str | None:
        """Normalize a classification value to its canonical upper-case name.

        Returns:
            The canonical name (e.g. "INTERNAL"), or None when the value is
            not a recognized classification (including non-str/non-enum input).

        """
        if isinstance(classification, Classification):
            return classification.value
        if isinstance(classification, str):
            upper = classification.upper()
            if upper in Classification.__members__:
                return upper
        return None

    @staticmethod
    def _utc_timestamp() -> str:
        """Current UTC time as an ISO-8601 string with a trailing 'Z'."""
        # Local import: the module header imports only `datetime`, and
        # `datetime.utcnow()` is deprecated since Python 3.12.
        from datetime import timezone

        return datetime.now(timezone.utc).replace(tzinfo=None).isoformat() + "Z"

    def store(
        self,
        key: str,
        data: Any,
        classification: str | Classification | None = None,
    ) -> bool:
        """Store data in long-term memory.

        Args:
            key: Storage key (also used as the JSON file name)
            data: Data to store (must be JSON-serializable)
            classification: Data classification (PUBLIC/INTERNAL/SENSITIVE);
                an unrecognized string falls back to "INTERNAL" with a warning

        Returns:
            True if stored successfully, False on I/O failure or unsafe key

        Raises:
            ValueError: If key is empty
            TypeError: If data cannot be serialized to JSON

        Example:
            >>> memory = LongTermMemory()
            >>> memory.store("user_prefs", {"theme": "dark"}, "INTERNAL")
            True

        """
        if not key or not key.strip():
            raise ValueError(f"key cannot be empty. Got: {key!r}")

        # Reject keys that could write outside the storage directory.
        if not self._is_safe_key(key):
            logger.error("path_traversal_attempt", key=key)
            return False

        try:
            classification_str = "INTERNAL"  # default when absent or invalid
            if classification is not None:
                normalized = self._classification_to_str(classification)
                if normalized is not None:
                    classification_str = normalized
                elif isinstance(classification, str):
                    # Only warn for invalid *strings*; other types silently
                    # keep the default (matches established behavior).
                    logger.warning(
                        "invalid_classification",
                        classification=classification,
                        using_default="INTERNAL",
                    )

            # Single timestamp so created_at and updated_at agree exactly.
            now = self._utc_timestamp()
            record = {
                "key": key,
                "data": data,
                "classification": classification_str,
                "created_at": now,
                "updated_at": now,
            }

            file_path = self.storage_path / f"{key}.json"
            validated_file_path = _validate_file_path(str(file_path))
            with validated_file_path.open("w", encoding="utf-8") as f:
                json.dump(record, f, indent=2)

            logger.debug("data_stored", key=key, classification=classification_str)
            return True

        except (TypeError, ValueError) as e:
            # json.dump raises these for unserializable data; surface to caller.
            logger.error("store_failed", key=key, error=str(e))
            raise
        except (OSError, PermissionError) as e:
            logger.error("storage_io_error", key=key, error=str(e))
            return False

    def retrieve(self, key: str) -> Any | None:
        """Retrieve data from long-term memory.

        Args:
            key: Storage key

        Returns:
            Stored data, or None if not found, unreadable, or the key is unsafe

        Raises:
            ValueError: If key is empty

        Example:
            >>> memory = LongTermMemory()
            >>> memory.store("config", {"value": 42})
            >>> data = memory.retrieve("config")
            >>> print(data["value"])
            42

        """
        if not key or not key.strip():
            raise ValueError(f"key cannot be empty. Got: {key!r}")

        # Same traversal guard as store(): reads must not escape storage_path.
        if not self._is_safe_key(key):
            logger.error("path_traversal_attempt", key=key)
            return None

        try:
            file_path = self.storage_path / f"{key}.json"

            if not file_path.exists():
                logger.debug("key_not_found", key=key)
                return None

            with file_path.open(encoding="utf-8") as f:
                record = json.load(f)

            logger.debug("data_retrieved", key=key)
            return record.get("data")

        except (OSError, PermissionError, json.JSONDecodeError) as e:
            logger.error("retrieve_failed", key=key, error=str(e))
            return None

    def delete(self, key: str) -> bool:
        """Delete data from long-term memory.

        Args:
            key: Storage key

        Returns:
            True if deleted, False if not found, unsafe key, or error

        Raises:
            ValueError: If key is empty

        Example:
            >>> memory = LongTermMemory()
            >>> memory.store("temp", {"data": "value"})
            >>> memory.delete("temp")
            True

        """
        if not key or not key.strip():
            raise ValueError(f"key cannot be empty. Got: {key!r}")

        # Same traversal guard as store(): deletes must not escape storage_path.
        if not self._is_safe_key(key):
            logger.error("path_traversal_attempt", key=key)
            return False

        try:
            file_path = self.storage_path / f"{key}.json"

            if not file_path.exists():
                logger.debug("key_not_found_for_deletion", key=key)
                return False

            file_path.unlink()
            logger.info("data_deleted", key=key)
            return True

        except (OSError, PermissionError) as e:
            logger.error("delete_failed", key=key, error=str(e))
            return False

    def list_keys(self, classification: str | Classification | None = None) -> list[str]:
        """List all keys in long-term memory, optionally filtered by classification.

        Args:
            classification: Filter by classification (PUBLIC/INTERNAL/SENSITIVE);
                an unrecognized string filter yields an empty list

        Returns:
            List of storage keys

        Example:
            >>> memory = LongTermMemory()
            >>> memory.store("public_data", {"x": 1}, "PUBLIC")
            >>> memory.store("internal_data", {"y": 2}, "INTERNAL")
            >>> keys = memory.list_keys(classification="PUBLIC")
            >>> print(keys)
            ['public_data']

        """
        filter_classification = None
        if classification is not None:
            filter_classification = self._classification_to_str(classification)
            if filter_classification is None and isinstance(classification, str):
                logger.warning("invalid_classification_filter", classification=classification)
                return []

        keys: list[str] = []
        try:
            for file_path in self.storage_path.glob("*.json"):
                try:
                    with file_path.open(encoding="utf-8") as f:
                        record = json.load(f)
                except (OSError, json.JSONDecodeError):
                    continue  # skip unreadable or corrupt entries

                if (
                    filter_classification is not None
                    and record.get("classification") != filter_classification
                ):
                    continue

                # Fall back to the file stem when the record lacks a "key".
                keys.append(record.get("key", file_path.stem))

        except (OSError, PermissionError) as e:
            logger.error("list_keys_failed", error=str(e))

        return keys

    def clear(self) -> int:
        """Clear all data from long-term memory.

        Returns:
            Number of keys deleted (best-effort; undeletable files are skipped)

        Warning:
            This operation cannot be undone!

        """
        count = 0
        try:
            for file_path in self.storage_path.glob("*.json"):
                try:
                    file_path.unlink()
                    count += 1
                except (OSError, PermissionError):
                    continue  # best-effort: keep clearing remaining files

            logger.warning("long_term_memory_cleared", count=count)
            return count

        except (OSError, PermissionError) as e:
            logger.error("clear_failed", error=str(e))
            return count
@@ -0,0 +1,167 @@
1
+ """File-based storage backend for long-term memory patterns
2
+
3
+ Provides simple file-based storage for MemDocs patterns.
4
+ Extracted from long_term.py for better modularity and testability.
5
+
6
+ In production, this can be replaced with actual MemDocs library integration
7
+ or other storage backends (Redis, PostgreSQL, etc.).
8
+
9
+ Key Features:
10
+ - JSON-based file storage
11
+ - Pattern storage with metadata
12
+ - Query support (by classification, creator)
13
+ - Path validation for security
14
+
15
+ Copyright 2025 Smart AI Memory, LLC
16
+ Licensed under Fair Source 0.9
17
+ """
18
+
19
+ import json
20
+ from pathlib import Path
21
+ from typing import Any
22
+
23
+ import structlog
24
+
25
+ from empathy_os.config import _validate_file_path
26
+
27
+ logger = structlog.get_logger(__name__)
28
+
29
+
30
class MemDocsStorage:
    """Mock/Simple MemDocs storage backend.

    In production, this would integrate with the actual MemDocs library.
    For now, provides simple JSON-file-based storage for testing: one
    ``<pattern_id>.json`` file per pattern under ``storage_dir``.
    """

    def __init__(self, storage_dir: str = "./memdocs_storage"):
        """Initialize storage backend.

        Args:
            storage_dir: Directory for pattern storage (created if missing)

        """
        self.storage_dir = Path(storage_dir)
        self.storage_dir.mkdir(parents=True, exist_ok=True)
        logger.info("memdocs_storage_initialized", storage_dir=str(self.storage_dir))

    @staticmethod
    def _is_safe_id(pattern_id: str) -> bool:
        """Return True when *pattern_id* cannot escape the storage directory."""
        return (
            ".." not in pattern_id
            and not pattern_id.startswith("/")
            and "\x00" not in pattern_id
        )

    def store(self, pattern_id: str, content: str, metadata: dict[str, Any]) -> bool:
        """Store a pattern.

        Args:
            pattern_id: Unique pattern identifier (used as the file name)
            content: Pattern content (may be encrypted)
            metadata: Pattern metadata

        Returns:
            True if successful, False if pattern_id is unsafe

        Raises:
            OSError: If storage fails
            TypeError: If content/metadata are not JSON-serializable

        """
        # Guard against path traversal via the pattern id.
        if not self._is_safe_id(pattern_id):
            logger.error("path_traversal_attempt", pattern_id=pattern_id)
            return False

        try:
            pattern_file = self.storage_dir / f"{pattern_id}.json"

            # Ensure parent directory exists
            pattern_file.parent.mkdir(parents=True, exist_ok=True)

            pattern_data = {"pattern_id": pattern_id, "content": content, "metadata": metadata}

            validated_pattern_file = _validate_file_path(str(pattern_file))
            with open(validated_pattern_file, "w", encoding="utf-8") as f:
                json.dump(pattern_data, f, indent=2)

            logger.debug("pattern_stored", pattern_id=pattern_id)
            return True

        except (TypeError, ValueError) as e:
            # json.dump raises TypeError/ValueError for unserializable data;
            # JSONDecodeError only occurs when *parsing*, never when dumping.
            logger.error("pattern_storage_failed", pattern_id=pattern_id, error=str(e))
            raise
        except (OSError, PermissionError) as e:
            logger.error("pattern_storage_failed", pattern_id=pattern_id, error=str(e))
            raise

    def retrieve(self, pattern_id: str) -> dict[str, Any] | None:
        """Retrieve a pattern.

        Args:
            pattern_id: Unique pattern identifier

        Returns:
            Pattern data dictionary, or None if not found/unsafe/unreadable

        """
        # Same traversal guard as store(): reads must not escape storage_dir.
        if not self._is_safe_id(pattern_id):
            logger.error("path_traversal_attempt", pattern_id=pattern_id)
            return None

        try:
            pattern_file = self.storage_dir / f"{pattern_id}.json"

            if not pattern_file.exists():
                logger.warning("pattern_not_found", pattern_id=pattern_id)
                return None

            with open(pattern_file, encoding="utf-8") as f:
                pattern_data: dict[str, Any] = json.load(f)

            logger.debug("pattern_retrieved", pattern_id=pattern_id)
            return pattern_data

        except (OSError, PermissionError, json.JSONDecodeError) as e:
            logger.error("pattern_retrieval_failed", pattern_id=pattern_id, error=str(e))
            return None

    def delete(self, pattern_id: str) -> bool:
        """Delete a pattern.

        Args:
            pattern_id: Unique pattern identifier

        Returns:
            True if deleted, False if not found or pattern_id is unsafe

        """
        # Same traversal guard as store(): deletes must not escape storage_dir.
        if not self._is_safe_id(pattern_id):
            logger.error("path_traversal_attempt", pattern_id=pattern_id)
            return False

        try:
            pattern_file = self.storage_dir / f"{pattern_id}.json"

            if not pattern_file.exists():
                return False

            pattern_file.unlink()
            logger.info("pattern_deleted", pattern_id=pattern_id)
            return True

        except (OSError, PermissionError) as e:
            logger.error("pattern_deletion_failed", pattern_id=pattern_id, error=str(e))
            return False

    def list_patterns(
        self,
        classification: str | None = None,
        created_by: str | None = None,
    ) -> list[str]:
        """List pattern IDs matching criteria.

        Args:
            classification: Filter by metadata "classification"
            created_by: Filter by metadata "created_by"

        Returns:
            List of pattern IDs (file stem used when a record lacks one)

        """
        pattern_ids: list[str] = []

        for pattern_file in self.storage_dir.glob("*.json"):
            try:
                with open(pattern_file, encoding="utf-8") as f:
                    data = json.load(f)
            except (OSError, json.JSONDecodeError):
                # Narrow catch: skip only unreadable/corrupt files instead of
                # silently swallowing every exception.
                continue

            metadata = data.get("metadata", {})

            # Apply filters
            if classification and metadata.get("classification") != classification:
                continue
            if created_by and metadata.get("created_by") != created_by:
                continue

            pattern_ids.append(data.get("pattern_id") or pattern_file.stem)

        return pattern_ids
@@ -188,6 +188,11 @@ class RedisMetrics:
188
188
  return 100.0
189
189
  return (self.operations_success / self.operations_total) * 100
190
190
 
191
+ @property
192
+ def total_requests(self) -> int:
193
+ """Total requests (alias for operations_total for backward compatibility)."""
194
+ return self.operations_total
195
+
191
196
  def to_dict(self) -> dict:
192
197
  """Convert metrics to dictionary for reporting and serialization.
193
198
 
@@ -297,11 +302,11 @@ class StagedPattern:
297
302
  """Validate fields after initialization"""
298
303
  # Pattern 1: String ID validation
299
304
  if not self.pattern_id or not self.pattern_id.strip():
300
- raise ValueError("pattern_id cannot be empty")
305
+ raise ValueError(f"pattern_id cannot be empty. Got: {self.pattern_id!r}")
301
306
  if not self.agent_id or not self.agent_id.strip():
302
- raise ValueError("agent_id cannot be empty")
307
+ raise ValueError(f"agent_id cannot be empty. Got: {self.agent_id!r}")
303
308
  if not self.pattern_type or not self.pattern_type.strip():
304
- raise ValueError("pattern_type cannot be empty")
309
+ raise ValueError(f"pattern_type cannot be empty. Got: {self.pattern_type!r}")
305
310
 
306
311
  # Pattern 4: Range validation for confidence
307
312
  if not 0.0 <= self.confidence <= 1.0: