empathy-framework 3.5.6__py3-none-any.whl → 3.7.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (72)
  1. agents/compliance_anticipation_agent.py +113 -118
  2. agents/compliance_db.py +339 -0
  3. agents/epic_integration_wizard.py +37 -48
  4. agents/notifications.py +291 -0
  5. agents/trust_building_behaviors.py +66 -85
  6. coach_wizards/__init__.py +11 -12
  7. coach_wizards/accessibility_wizard.py +12 -12
  8. coach_wizards/api_wizard.py +12 -12
  9. coach_wizards/base_wizard.py +26 -20
  10. coach_wizards/cicd_wizard.py +15 -13
  11. coach_wizards/compliance_wizard.py +12 -12
  12. coach_wizards/database_wizard.py +12 -12
  13. coach_wizards/debugging_wizard.py +12 -12
  14. coach_wizards/documentation_wizard.py +12 -12
  15. coach_wizards/generate_wizards.py +1 -2
  16. coach_wizards/localization_wizard.py +21 -14
  17. coach_wizards/migration_wizard.py +12 -12
  18. coach_wizards/monitoring_wizard.py +12 -12
  19. coach_wizards/observability_wizard.py +12 -12
  20. coach_wizards/performance_wizard.py +12 -12
  21. coach_wizards/prompt_engineering_wizard.py +22 -25
  22. coach_wizards/refactoring_wizard.py +12 -12
  23. coach_wizards/scaling_wizard.py +12 -12
  24. coach_wizards/security_wizard.py +12 -12
  25. coach_wizards/testing_wizard.py +12 -12
  26. {empathy_framework-3.5.6.dist-info → empathy_framework-3.7.0.dist-info}/METADATA +234 -30
  27. empathy_framework-3.7.0.dist-info/RECORD +105 -0
  28. empathy_healthcare_plugin/__init__.py +1 -2
  29. empathy_llm_toolkit/__init__.py +5 -6
  30. empathy_llm_toolkit/claude_memory.py +14 -15
  31. empathy_llm_toolkit/code_health.py +27 -19
  32. empathy_llm_toolkit/contextual_patterns.py +11 -12
  33. empathy_llm_toolkit/core.py +43 -49
  34. empathy_llm_toolkit/git_pattern_extractor.py +16 -12
  35. empathy_llm_toolkit/levels.py +6 -13
  36. empathy_llm_toolkit/pattern_confidence.py +14 -18
  37. empathy_llm_toolkit/pattern_resolver.py +10 -12
  38. empathy_llm_toolkit/pattern_summary.py +13 -11
  39. empathy_llm_toolkit/providers.py +27 -38
  40. empathy_llm_toolkit/session_status.py +18 -20
  41. empathy_llm_toolkit/state.py +20 -21
  42. empathy_os/__init__.py +72 -73
  43. empathy_os/cli.py +193 -98
  44. empathy_os/cli_unified.py +68 -41
  45. empathy_os/config.py +31 -31
  46. empathy_os/coordination.py +48 -54
  47. empathy_os/core.py +90 -99
  48. empathy_os/cost_tracker.py +20 -23
  49. empathy_os/discovery.py +9 -11
  50. empathy_os/emergence.py +20 -21
  51. empathy_os/exceptions.py +18 -30
  52. empathy_os/feedback_loops.py +27 -30
  53. empathy_os/levels.py +31 -34
  54. empathy_os/leverage_points.py +27 -28
  55. empathy_os/logging_config.py +11 -12
  56. empathy_os/monitoring.py +27 -27
  57. empathy_os/pattern_library.py +29 -28
  58. empathy_os/persistence.py +30 -34
  59. empathy_os/platform_utils.py +46 -47
  60. empathy_os/redis_config.py +14 -15
  61. empathy_os/redis_memory.py +53 -56
  62. empathy_os/templates.py +12 -11
  63. empathy_os/trust_building.py +44 -36
  64. empathy_os/workflow_commands.py +123 -31
  65. empathy_software_plugin/__init__.py +1 -2
  66. empathy_software_plugin/cli.py +32 -25
  67. empathy_software_plugin/plugin.py +4 -8
  68. empathy_framework-3.5.6.dist-info/RECORD +0 -103
  69. {empathy_framework-3.5.6.dist-info → empathy_framework-3.7.0.dist-info}/WHEEL +0 -0
  70. {empathy_framework-3.5.6.dist-info → empathy_framework-3.7.0.dist-info}/entry_points.txt +0 -0
  71. {empathy_framework-3.5.6.dist-info → empathy_framework-3.7.0.dist-info}/licenses/LICENSE +0 -0
  72. {empathy_framework-3.5.6.dist-info → empathy_framework-3.7.0.dist-info}/top_level.txt +0 -0
empathy_os/monitoring.py CHANGED
@@ -1,5 +1,4 @@
- """
- Agent Monitoring for Distributed Memory Networks
+ """Agent Monitoring for Distributed Memory Networks

  Provides monitoring and metrics collection for multi-agent systems.
  Tracks individual agent performance, pattern contributions, and
@@ -79,8 +78,7 @@ class TeamMetrics:

  @property
  def collaboration_efficiency(self) -> float:
- """
- Measure of how effectively agents collaborate.
+ """Measure of how effectively agents collaborate.

  Higher values indicate more cross-agent pattern reuse,
  meaning agents are learning from each other.
@@ -91,8 +89,7 @@ class TeamMetrics:


  class AgentMonitor:
- """
- Monitors and tracks metrics for multi-agent systems.
+ """Monitors and tracks metrics for multi-agent systems.

  Provides insights into:
  - Individual agent performance
@@ -116,14 +113,15 @@ class AgentMonitor:
  >>> # Get team stats
  >>> team = monitor.get_team_stats()
  >>> print(f"Collaboration efficiency: {team['collaboration_efficiency']:.0%}")
+
  """

  def __init__(self, pattern_library: PatternLibrary | None = None):
- """
- Initialize the AgentMonitor.
+ """Initialize the AgentMonitor.

  Args:
  pattern_library: Optional pattern library to track for shared patterns
+
  """
  self.agents: dict[str, AgentMetrics] = {}
  self.pattern_library = pattern_library
@@ -139,12 +137,12 @@ class AgentMonitor:
  agent_id: str,
  response_time_ms: float = 0.0,
  ):
- """
- Record an agent interaction.
+ """Record an agent interaction.

  Args:
  agent_id: ID of the agent
  response_time_ms: Response time in milliseconds
+
  """
  agent = self._get_or_create_agent(agent_id)
  agent.total_interactions += 1
@@ -160,12 +158,12 @@ class AgentMonitor:
  )

  def record_pattern_discovery(self, agent_id: str, pattern_id: str | None = None):
- """
- Record that an agent discovered a new pattern.
+ """Record that an agent discovered a new pattern.

  Args:
  agent_id: ID of the agent that discovered the pattern
  pattern_id: Optional pattern ID for tracking
+
  """
  agent = self._get_or_create_agent(agent_id)
  agent.patterns_discovered += 1
@@ -178,14 +176,14 @@ class AgentMonitor:
  pattern_agent: str | None = None,
  success: bool = True,
  ):
- """
- Record that an agent used a pattern.
+ """Record that an agent used a pattern.

  Args:
  agent_id: ID of the agent using the pattern
  pattern_id: ID of the pattern being used
  pattern_agent: ID of the agent that contributed the pattern
  success: Whether the pattern use was successful
+
  """
  agent = self._get_or_create_agent(agent_id)
  agent.patterns_used += 1
@@ -207,18 +205,18 @@ class AgentMonitor:
  "cross_agent": is_cross_agent,
  "success": success,
  "timestamp": datetime.now(),
- }
+ },
  )

  def get_agent_stats(self, agent_id: str) -> dict[str, Any]:
- """
- Get statistics for a specific agent.
+ """Get statistics for a specific agent.

  Args:
  agent_id: ID of the agent

  Returns:
  Dictionary with agent statistics
+
  """
  agent = self.agents.get(agent_id)

@@ -252,11 +250,11 @@ class AgentMonitor:
  }

  def get_team_stats(self) -> dict[str, Any]:
- """
- Get aggregated statistics for the entire agent team.
+ """Get aggregated statistics for the entire agent team.

  Returns:
  Dictionary with team-wide statistics
+
  """
  if not self.agents:
  # Get shared patterns count from library even if no agents
@@ -319,39 +317,41 @@ class AgentMonitor:
  }

  def get_top_contributors(self, n: int = 5) -> list[dict[str, Any]]:
- """
- Get the top pattern-contributing agents.
+ """Get the top pattern-contributing agents.

  Args:
  n: Number of agents to return

  Returns:
  List of agent stats, sorted by patterns discovered
+
  """
  sorted_agents = sorted(
- self.agents.values(), key=lambda a: a.patterns_discovered, reverse=True
+ self.agents.values(),
+ key=lambda a: a.patterns_discovered,
+ reverse=True,
  )

  return [self.get_agent_stats(agent.agent_id) for agent in sorted_agents[:n]]

  def get_alerts(self, limit: int = 100) -> list[dict[str, Any]]:
- """
- Get recent alerts.
+ """Get recent alerts.

  Args:
  limit: Maximum number of alerts to return

  Returns:
  List of alert dictionaries
+
  """
  return self.alerts[-limit:]

  def check_health(self) -> dict[str, Any]:
- """
- Check overall system health.
+ """Check overall system health.

  Returns:
  Health status dictionary
+
  """
  team_stats = self.get_team_stats()
  recent_alerts = [
@@ -400,7 +400,7 @@ class AgentMonitor:
  "message": message,
  "severity": severity,
  "timestamp": datetime.now(),
- }
+ },
  )

  # Keep alerts bounded
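The monitoring.py changes above are purely stylistic: docstring summaries move onto the opening """ line, a blank line is added before the closing quotes, and trailing commas are added to multi-line calls, so the public API is unchanged. A minimal usage sketch built only from the signatures visible in these hunks (illustrative; the empathy_os.monitoring import path is assumed from the file layout, and the agent and pattern IDs are made up):

# Illustrative sketch, not part of the diff; import path and IDs are assumptions.
from empathy_os.monitoring import AgentMonitor

monitor = AgentMonitor()  # pattern_library is optional per __init__ above
monitor.record_interaction("doc_agent", response_time_ms=120.0)
monitor.record_pattern_discovery("doc_agent", pattern_id="p-001")
monitor.record_pattern_use("debug_agent", "p-001", pattern_agent="doc_agent", success=True)

print(monitor.get_agent_stats("doc_agent"))
print(monitor.get_team_stats())
print(monitor.get_top_contributors(n=3))
print(monitor.check_health())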
empathy_os/pattern_library.py CHANGED
@@ -1,5 +1,4 @@
- """
- Pattern Library for Multi-Agent Collaboration
+ """Pattern Library for Multi-Agent Collaboration

  Enables AI agents to share discovered patterns with each other, accelerating
  learning across the agent collective (Level 5: Systems Empathy).
@@ -17,8 +16,7 @@ from typing import Any

  @dataclass
  class Pattern:
- """
- A discovered pattern that can be shared across AI agents
+ """A discovered pattern that can be shared across AI agents

  Patterns represent reusable solutions, common behaviors, or
  learned heuristics that one agent discovered and others can benefit from.
@@ -27,6 +25,7 @@ class Pattern:
  - Sequential patterns: "After action X, users typically need Y"
  - Temporal patterns: "On Mondays, prioritize Z"
  - Conditional patterns: "If context A, then approach B works best"
+
  """

  id: str
@@ -76,8 +75,7 @@ class PatternMatch:


  class PatternLibrary:
- """
- Shared library for multi-agent pattern discovery and sharing
+ """Shared library for multi-agent pattern discovery and sharing

  Enables Level 5 Systems Empathy: AI-AI cooperation where one agent's
  discovery benefits all agents in the collective.
@@ -112,6 +110,7 @@ class PatternLibrary:
  >>> context = {"recent_event": "system_update", "user_confusion": True}
  >>> matches = library.query_patterns("documentation_agent", context)
  >>> print(f"Found {len(matches)} relevant patterns")
+
  """

  def __init__(self):
@@ -121,8 +120,7 @@ class PatternLibrary:
  self.pattern_graph: dict[str, list[str]] = {} # pattern_id -> related_pattern_ids

  def contribute_pattern(self, agent_id: str, pattern: Pattern) -> None:
- """
- Agent contributes a discovered pattern to the library
+ """Agent contributes a discovered pattern to the library

  Args:
  agent_id: ID of contributing agent
@@ -138,6 +136,7 @@
  ... confidence=0.9
  ... )
  >>> library.contribute_pattern("agent_1", pattern)
+
  """
  # Store pattern
  self.patterns[pattern.id] = pattern
@@ -159,8 +158,7 @@
  min_confidence: float = 0.5,
  limit: int = 10,
  ) -> list[PatternMatch]:
- """
- Query relevant patterns for current context
+ """Query relevant patterns for current context

  Args:
  agent_id: ID of querying agent
@@ -179,6 +177,7 @@
  ... "time_of_day": "morning"
  ... }
  >>> matches = library.query_patterns("debug_agent", context, min_confidence=0.7)
+
  """
  matches: list[PatternMatch] = []

@@ -199,7 +198,7 @@
  pattern=pattern,
  relevance_score=relevance_score,
  matching_factors=matching_factors,
- )
+ ),
  )

  # Sort by relevance and limit
@@ -207,40 +206,40 @@
  return matches[:limit]

  def get_pattern(self, pattern_id: str) -> Pattern | None:
- """
- Get a specific pattern by ID
+ """Get a specific pattern by ID

  Args:
  pattern_id: Pattern identifier

  Returns:
  Pattern if found, None otherwise
+
  """
  return self.patterns.get(pattern_id)

  def record_pattern_outcome(self, pattern_id: str, success: bool):
- """
- Record outcome of using a pattern
+ """Record outcome of using a pattern

  Updates pattern statistics to improve future recommendations.

  Args:
  pattern_id: ID of pattern that was used
  success: Whether using the pattern was successful
+
  """
  pattern = self.patterns.get(pattern_id)
  if pattern:
  pattern.record_usage(success)

  def link_patterns(self, pattern_id_1: str, pattern_id_2: str):
- """
- Create a link between related patterns
+ """Create a link between related patterns

  Helps agents discover complementary patterns.

  Args:
  pattern_id_1: First pattern ID
  pattern_id_2: Second pattern ID
+
  """
  if pattern_id_1 in self.pattern_graph:
  if pattern_id_2 not in self.pattern_graph[pattern_id_1]:
@@ -251,8 +250,7 @@
  self.pattern_graph[pattern_id_2].append(pattern_id_1)

  def get_related_patterns(self, pattern_id: str, depth: int = 1) -> list[Pattern]:
- """
- Get patterns related to a given pattern
+ """Get patterns related to a given pattern

  Args:
  pattern_id: Source pattern ID
@@ -260,6 +258,7 @@

  Returns:
  List of related patterns
+
  """
  if depth <= 0 or pattern_id not in self.pattern_graph:
  return []
@@ -278,21 +277,20 @@
  return [self.patterns[pid] for pid in related_ids if pid in self.patterns]

  def get_agent_patterns(self, agent_id: str) -> list[Pattern]:
- """
- Get all patterns contributed by a specific agent
+ """Get all patterns contributed by a specific agent

  Args:
  agent_id: Agent identifier

  Returns:
  List of patterns from this agent
+
  """
  pattern_ids = self.agent_contributions.get(agent_id, [])
  return [self.patterns[pid] for pid in pattern_ids if pid in self.patterns]

  def get_top_patterns(self, n: int = 10, sort_by: str = "success_rate") -> list[Pattern]:
- """
- Get top N patterns by specified metric
+ """Get top N patterns by specified metric

  Args:
  n: Number of patterns to return
@@ -300,6 +298,7 @@

  Returns:
  Top N patterns
+
  """
  patterns = list(self.patterns.values())

@@ -313,11 +312,11 @@
  return patterns[:n]

  def get_library_stats(self) -> dict[str, Any]:
- """
- Get statistics about the pattern library
+ """Get statistics about the pattern library

  Returns:
  Dict with library statistics
+
  """
  if not self.patterns:
  return {
@@ -350,13 +349,15 @@
  }

  def _calculate_relevance(
- self, pattern: Pattern, context: dict[str, Any]
+ self,
+ pattern: Pattern,
+ context: dict[str, Any],
  ) -> tuple[float, list[str]]:
- """
- Calculate how relevant a pattern is to current context
+ """Calculate how relevant a pattern is to current context

  Returns:
  (relevance_score, matching_factors)
+
  """
  relevance = 0.0
  matching_factors = []
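As in monitoring.py, the pattern_library.py hunks only normalize docstrings, add trailing commas, and split long signatures (the sorted(...) call, _calculate_relevance(...)) across lines; behavior is unchanged. A sketch of the library API touched above, using the method signatures and PatternMatch fields shown in these hunks (illustrative; the import path and the agent ID are assumptions):

# Illustrative sketch, not part of the diff; import path and agent ID are assumptions.
from empathy_os.pattern_library import PatternLibrary

library = PatternLibrary()

# Pattern construction is elided: only the id and confidence fields are
# visible in this diff (see the contribute_pattern doctest above).
context = {"recent_event": "system_update", "user_confusion": True}
matches = library.query_patterns("documentation_agent", context, min_confidence=0.7)
for match in matches:
    library.record_pattern_outcome(match.pattern.id, success=True)

print(library.get_library_stats())
print(library.get_top_patterns(n=5, sort_by="success_rate"))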
empathy_os/persistence.py CHANGED
@@ -1,5 +1,4 @@
- """
- Persistence Layer for Empathy Framework
+ """Persistence Layer for Empathy Framework

  Provides:
  - Pattern library save/load (JSON, SQLite)
@@ -21,8 +20,7 @@ from .pattern_library import Pattern, PatternLibrary


  class PatternPersistence:
- """
- Save and load PatternLibrary to/from files
+ """Save and load PatternLibrary to/from files

  Supports:
  - JSON format (human-readable, good for backups)
@@ -31,8 +29,7 @@ class PatternPersistence:

  @staticmethod
  def save_to_json(library: PatternLibrary, filepath: str):
- """
- Save pattern library to JSON file
+ """Save pattern library to JSON file

  Args:
  library: PatternLibrary instance to save
@@ -41,6 +38,7 @@ class PatternPersistence:
  Example:
  >>> library = PatternLibrary()
  >>> PatternPersistence.save_to_json(library, "patterns.json")
+
  """
  patterns_list: list[dict[str, Any]] = []
  data: dict[str, Any] = {
@@ -71,7 +69,7 @@
  "tags": pattern.tags,
  "discovered_at": pattern.discovered_at.isoformat(),
  "last_used": pattern.last_used.isoformat() if pattern.last_used else None,
- }
+ },
  )

  # Write to file
@@ -80,8 +78,7 @@

  @staticmethod
  def load_from_json(filepath: str) -> PatternLibrary:
- """
- Load pattern library from JSON file
+ """Load pattern library from JSON file

  Args:
  filepath: Path to JSON file
@@ -95,6 +92,7 @@

  Example:
  >>> library = PatternPersistence.load_from_json("patterns.json")
+
  """
  with open(filepath) as f:
  data = json.load(f)
@@ -132,8 +130,7 @@

  @staticmethod
  def save_to_sqlite(library: PatternLibrary, db_path: str):
- """
- Save pattern library to SQLite database
+ """Save pattern library to SQLite database

  Args:
  library: PatternLibrary instance to save
@@ -146,6 +143,7 @@
  Example:
  >>> library = PatternLibrary()
  >>> PatternPersistence.save_to_sqlite(library, "patterns.db")
+
  """
  conn = sqlite3.connect(db_path)
  cursor = conn.cursor()
@@ -170,7 +168,7 @@
  last_used TIMESTAMP,
  updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
  )
- """
+ """,
  )

  cursor.execute(
@@ -183,7 +181,7 @@
  used_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
  FOREIGN KEY (pattern_id) REFERENCES patterns(id)
  )
- """
+ """,
  )

  # Insert or update patterns
@@ -219,8 +217,7 @@

  @staticmethod
  def load_from_sqlite(db_path: str) -> PatternLibrary:
- """
- Load pattern library from SQLite database
+ """Load pattern library from SQLite database

  Args:
  db_path: Path to SQLite database file
@@ -230,6 +227,7 @@

  Example:
  >>> library = PatternPersistence.load_from_sqlite("patterns.db")
+
  """
  conn = sqlite3.connect(db_path)
  conn.row_factory = sqlite3.Row # Access columns by name
@@ -265,8 +263,7 @@


  class StateManager:
- """
- Persist collaboration state across sessions
+ """Persist collaboration state across sessions

  Enables:
  - Long-term trust tracking
@@ -279,8 +276,7 @@ class StateManager:
  self.storage_path.mkdir(exist_ok=True, parents=True)

  def save_state(self, user_id: str, state: CollaborationState):
- """
- Save user's collaboration state to JSON
+ """Save user's collaboration state to JSON

  Args:
  user_id: User identifier
@@ -289,6 +285,7 @@
  Example:
  >>> manager = StateManager()
  >>> manager.save_state("user123", empathy.collaboration_state)
+
  """
  filepath = self.storage_path / f"{user_id}.json"

@@ -308,8 +305,7 @@
  json.dump(data, f, indent=2)

  def load_state(self, user_id: str) -> CollaborationState | None:
- """
- Load user's previous state
+ """Load user's previous state

  Args:
  user_id: User identifier
@@ -323,6 +319,7 @@
  >>> if state:
  ... empathy = EmpathyOS(user_id="user123", target_level=4)
  ... empathy.collaboration_state = state
+
  """
  filepath = self.storage_path / f"{user_id}.json"

@@ -349,8 +346,7 @@
  return None

  def list_users(self) -> list[str]:
- """
- List all users with saved state
+ """List all users with saved state

  Returns:
  List of user IDs
@@ -359,12 +355,12 @@
  >>> manager = StateManager()
  >>> users = manager.list_users()
  >>> print(f"Found {len(users)} users")
+
  """
  return [p.stem for p in self.storage_path.glob("*.json")]

  def delete_state(self, user_id: str) -> bool:
- """
- Delete user's saved state
+ """Delete user's saved state

  Args:
  user_id: User identifier
@@ -375,6 +371,7 @@
  Example:
  >>> manager = StateManager()
  >>> deleted = manager.delete_state("user123")
+
  """
  filepath = self.storage_path / f"{user_id}.json"

@@ -385,8 +382,7 @@


  class MetricsCollector:
- """
- Collect and persist empathy framework metrics
+ """Collect and persist empathy framework metrics

  Tracks:
  - Empathy level usage
@@ -415,21 +411,21 @@
  timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
  metadata TEXT
  )
- """
+ """,
  )

  cursor.execute(
  """
  CREATE INDEX IF NOT EXISTS idx_user_level
  ON metrics(user_id, empathy_level)
- """
+ """,
  )

  cursor.execute(
  """
  CREATE INDEX IF NOT EXISTS idx_timestamp
  ON metrics(timestamp)
- """
+ """,
  )

  conn.commit()
@@ -443,8 +439,7 @@
  response_time_ms: float,
  metadata: dict | None = None,
  ):
- """
- Record a single metric event
+ """Record a single metric event

  Args:
  user_id: User identifier
@@ -462,6 +457,7 @@
  ... response_time_ms=250.5,
  ... metadata={"bottlenecks_predicted": 3}
  ... )
+
  """
  conn = sqlite3.connect(self.db_path)
  cursor = conn.cursor()
@@ -485,8 +481,7 @@
  conn.close()

  def get_user_stats(self, user_id: str) -> dict:
- """
- Get aggregated statistics for a user
+ """Get aggregated statistics for a user

  Args:
  user_id: User identifier
@@ -498,6 +493,7 @@
  >>> collector = MetricsCollector()
  >>> stats = collector.get_user_stats("user123")
  >>> print(f"Success rate: {stats['success_rate']:.1%}")
+
  """
  conn = sqlite3.connect(self.db_path)
  conn.row_factory = sqlite3.Row
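The persistence.py hunks follow the same pattern: docstring reflow plus trailing commas after the triple-quoted SQL strings passed to cursor.execute. A short round-trip sketch using the classes shown above (illustrative; file paths, the user ID, and the import paths are assumptions):

# Illustrative sketch, not part of the diff; import paths and file names are assumptions.
from empathy_os.pattern_library import PatternLibrary
from empathy_os.persistence import MetricsCollector, PatternPersistence, StateManager

library = PatternLibrary()
PatternPersistence.save_to_json(library, "patterns.json")
restored = PatternPersistence.load_from_json("patterns.json")

PatternPersistence.save_to_sqlite(restored, "patterns.db")
restored_db = PatternPersistence.load_from_sqlite("patterns.db")

manager = StateManager()
print(manager.list_users())

collector = MetricsCollector()
print(collector.get_user_stats("user123"))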