agmem 0.2.0__py3-none-any.whl → 0.3.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
memvcs/core/daemon.py ADDED
@@ -0,0 +1,735 @@
+ """
+ Real-Time Observation Daemon for agmem.
+
+ Background process that watches MCP tool activity and automatically commits
+ observations to the memory repository.
+
+ Features:
+ - MCP event stream listener (file-based or watchdog)
+ - Observation extraction and memory type classification
+ - Auto-staging with type-specific paths
+ - Batched auto-commit with LLM-generated messages
+ - Session management with crash recovery
+ """
+
+ import json
+ import logging
+ import os
+ import signal
+ import sys
+ import threading
+ import time
+ import uuid
+ from dataclasses import dataclass, field
+ from datetime import datetime, timezone
+ from pathlib import Path
+ from typing import Any, Callable, Dict, List, Optional
+
+ logger = logging.getLogger("agmem-daemon")
+
+
+ @dataclass
+ class Observation:
+     """A single observation from an MCP tool call."""
+
+     id: str
+     timestamp: str
+     tool_name: str
+     arguments: Dict[str, Any]
+     result: Optional[str] = None
+     memory_type: str = "episodic"  # episodic, semantic, procedural
+     summary: Optional[str] = None
+
+     def to_dict(self) -> Dict[str, Any]:
+         return {
+             "id": self.id,
+             "timestamp": self.timestamp,
+             "tool_name": self.tool_name,
+             "arguments": self.arguments,
+             "result": self.result,
+             "memory_type": self.memory_type,
+             "summary": self.summary,
+         }
+
+     @classmethod
+     def from_dict(cls, data: Dict[str, Any]) -> "Observation":
+         return cls(
+             id=data.get("id", str(uuid.uuid4())),
+             timestamp=data.get("timestamp", datetime.now(timezone.utc).isoformat()),
+             tool_name=data["tool_name"],
+             arguments=data.get("arguments", {}),
+             result=data.get("result"),
+             memory_type=data.get("memory_type", "episodic"),
+             summary=data.get("summary"),
+         )
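
A quick illustration of the serialization round trip (values are made up; dataclass equality makes the assertion meaningful):

    import uuid

    obs = Observation(
        id=str(uuid.uuid4()),
        timestamp="2024-01-01T12:00:00+00:00",
        tool_name="read_file",
        arguments={"path": "README.md"},
    )
    assert Observation.from_dict(obs.to_dict()) == obs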
+
+
+ @dataclass
+ class SessionState:
+     """State of the current observation session."""
+
+     session_id: str
+     started_at: str
+     observations: List[Observation] = field(default_factory=list)
+     last_commit_at: Optional[str] = None
+     commit_count: int = 0
+
+     def to_dict(self) -> Dict[str, Any]:
+         return {
+             "session_id": self.session_id,
+             "started_at": self.started_at,
+             "observations": [o.to_dict() for o in self.observations],
+             "last_commit_at": self.last_commit_at,
+             "commit_count": self.commit_count,
+         }
+
+     @classmethod
+     def from_dict(cls, data: Dict[str, Any]) -> "SessionState":
+         return cls(
+             session_id=data["session_id"],
+             started_at=data["started_at"],
+             observations=[Observation.from_dict(o) for o in data.get("observations", [])],
+             last_commit_at=data.get("last_commit_at"),
+             commit_count=data.get("commit_count", 0),
+         )
+
+
+ class ObservationExtractor:
+     """Extracts observations from MCP tool calls and classifies memory types."""
+
+     # Tool name patterns → memory type mapping
+     MEMORY_TYPE_MAP = {
+         # Episodic: Events, actions, what happened
+         "episodic": [
+             "run_command",
+             "execute",
+             "shell",
+             "terminal",
+             "write_file",
+             "create_file",
+             "delete_file",
+             "move_file",
+             "copy_file",
+             "mkdir",
+             "git_",
+             "deploy",
+             "build",
+             "test",
+             "lint",
+         ],
+         # Semantic: Knowledge, facts, data
+         "semantic": [
+             "search",
+             "read_file",
+             "read_url",
+             "fetch",
+             "get_",
+             "list_",
+             "query",
+             "lookup",
+             "find",
+             "browse",
+             "api_call",
+             "database",
+             "memory_read",
+             "memory_search",
+         ],
+         # Procedural: How-to, processes, workflows
+         "procedural": [
+             "generate",
+             "refactor",
+             "implement",
+             "create_",
+             "setup",
+             "configure",
+             "install",
+             "workflow",
+             "pipeline",
+             "template",
+         ],
+     }
+
+     # Tools to ignore (trivial operations)
+     IGNORE_TOOLS = {
+         "echo",
+         "pwd",
+         "whoami",
+         "date",
+         "clear",
+         "history",
+         "noop",
+         "ping",
+     }
+
+     def __init__(self, min_content_length: int = 50):
+         self.min_content_length = min_content_length
+
+     def should_capture(self, tool_name: str, result: Optional[str] = None) -> bool:
+         """Determine if this tool call should be captured as an observation."""
+         if tool_name.lower() in self.IGNORE_TOOLS:
+             return False
+         if result and len(result) < self.min_content_length:
+             return False
+         return True
+
+     def classify_memory_type(self, tool_name: str) -> str:
+         """Classify the tool call into a memory type."""
+         tool_lower = tool_name.lower()
+         for memory_type, patterns in self.MEMORY_TYPE_MAP.items():
+             for pattern in patterns:
+                 if pattern in tool_lower:
+                     return memory_type
+         return "episodic"  # Default to episodic
+
+     def extract(
+         self,
+         tool_name: str,
+         arguments: Dict[str, Any],
+         result: Optional[str] = None,
+     ) -> Optional[Observation]:
+         """Extract an observation from a tool call."""
+         if not self.should_capture(tool_name, result):
+             return None
+
+         memory_type = self.classify_memory_type(tool_name)
+
+         # Generate summary from tool name and key arguments
+         summary_parts = [tool_name]
+         for key in ["path", "file", "url", "query", "command"]:
+             if key in arguments:
+                 val = str(arguments[key])[:100]
+                 summary_parts.append(f"{key}={val}")
+
+         return Observation(
+             id=str(uuid.uuid4()),
+             timestamp=datetime.now(timezone.utc).isoformat(),
+             tool_name=tool_name,
+             arguments=arguments,
+             result=result[:2000] if result else None,  # Truncate large results
+             memory_type=memory_type,
+             summary=" ".join(summary_parts)[:200],
+         )
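
The substring matching above is first-match-wins in dict order (episodic, then semantic, then procedural). A few assumed inputs to illustrate:

    extractor = ObservationExtractor()
    assert extractor.classify_memory_type("git_commit") == "episodic"       # matches "git_"
    assert extractor.classify_memory_type("memory_search") == "semantic"    # matches "search"
    assert extractor.classify_memory_type("setup_project") == "procedural"  # matches "setup"
    # Ignored tools are never captured, regardless of result size.
    assert extractor.extract("ping", {}, result="x" * 100) is None
    # Results shorter than min_content_length (default 50) are dropped.
    assert extractor.extract("read_file", {"path": "a.txt"}, result="short") is None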
+
+
+ class AutoStagingEngine:
+     """Writes observations to the current/ directory with type-specific paths."""
+
+     def __init__(self, repo_root: Path):
+         self.repo_root = Path(repo_root)
+         self.current_dir = self.repo_root / "current"
+
+     def stage_observation(self, observation: Observation) -> Path:
+         """Write observation to current/ directory and return the path."""
+         # Parse timestamp for date/time path components
+         ts = datetime.fromisoformat(observation.timestamp.replace("Z", "+00:00"))
+         date_str = ts.strftime("%Y-%m-%d")
+         time_str = ts.strftime("%H-%M-%S")
+
+         # Build path based on memory type
+         if observation.memory_type == "episodic":
+             # episodic/YYYY-MM-DD/HH-MM-SS-tool_name.md
+             subdir = self.current_dir / "episodic" / date_str
+             filename = f"{time_str}-{observation.tool_name}.md"
+         elif observation.memory_type == "semantic":
+             # semantic/topic_name.md (use tool_name as topic)
+             subdir = self.current_dir / "semantic"
+             filename = f"{observation.tool_name}-{observation.id[:8]}.md"
+         else:
+             # procedural/task_name.md
+             subdir = self.current_dir / "procedural"
+             filename = f"{observation.tool_name}-{observation.id[:8]}.md"
+
+         subdir.mkdir(parents=True, exist_ok=True)
+         filepath = subdir / filename
+
+         # Format content as markdown
+         content = self._format_observation_md(observation)
+         filepath.write_text(content, encoding="utf-8")
+
+         return filepath
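
Under this scheme, staged files land in repository paths like the following (dates and ID prefixes are illustrative):

    current/episodic/2024-01-01/12-00-00-run_command.md
    current/semantic/read_file-2f1c8a9b.md
    current/procedural/setup_project-9d4e7f01.md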
+
+     def _format_observation_md(self, observation: Observation) -> str:
+         """Format observation as markdown with YAML frontmatter."""
+         args_json = json.dumps(observation.arguments, indent=2, default=str)
+         result_preview = (observation.result or "")[:500]
+         if observation.result and len(observation.result) > 500:
+             result_preview += "\n... (truncated)"
+
+         return f"""---
+ schema_version: "1.0"
+ memory_type: {observation.memory_type}
+ observation_id: {observation.id}
+ timestamp: {observation.timestamp}
+ tool_name: {observation.tool_name}
+ auto_captured: true
+ ---
+
+ # {observation.summary or observation.tool_name}
+
+ ## Tool Call
+
+ **Tool:** `{observation.tool_name}`
+ **Time:** {observation.timestamp}
+
+ ### Arguments
+
+ ```json
+ {args_json}
+ ```
+
+ ### Result
+
+ ```
+ {result_preview}
+ ```
+ """
+
+
+ class CommitMessageGenerator:
+     """Generates semantic commit messages from observations using LLM or templates."""
+
+     def __init__(self, use_llm: bool = True, llm_model: str = "gpt-4o-mini"):
+         self.use_llm = use_llm
+         self.llm_model = llm_model
+
+     def generate(self, observations: List[Observation]) -> str:
+         """Generate a commit message for a batch of observations."""
+         if not observations:
+             return "Auto-commit: Empty observation batch"
+
+         if self.use_llm:
+             try:
+                 return self._generate_llm(observations)
+             except Exception as e:
+                 logger.warning(f"LLM message generation failed: {e}, falling back to template")
+
+         return self._generate_template(observations)
+
+     def _generate_template(self, observations: List[Observation]) -> str:
+         """Generate a template-based commit message."""
+         tool_counts: Dict[str, int] = {}
+         for obs in observations:
+             tool_counts[obs.tool_name] = tool_counts.get(obs.tool_name, 0) + 1
+
+         # Build subject line
+         if len(tool_counts) == 1:
+             tool_name = list(tool_counts.keys())[0]
+             count = tool_counts[tool_name]
+             subject = f"Auto-commit: {count} {tool_name} observation(s)"
+         else:
+             subject = f"Auto-commit: {len(observations)} observations from {len(tool_counts)} tools"
+
+         # Build body
+         body_lines = ["", "Captured observations:"]
+         for obs in observations[:10]:  # Show first 10
+             body_lines.append(f"- [{obs.memory_type}] {obs.summary or obs.tool_name}")
+         if len(observations) > 10:
+             body_lines.append(f"... and {len(observations) - 10} more")
+
+         return subject + "\n" + "\n".join(body_lines)
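
For a hypothetical batch of twelve observations across three tools, the template path yields a message shaped like:

    Auto-commit: 12 observations from 3 tools

    Captured observations:
    - [semantic] read_file path=README.md
    - [episodic] run_command command=pytest
    (eight more bullet lines)
    ... and 2 more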
+
+     def _generate_llm(self, observations: List[Observation]) -> str:
+         """Generate commit message using LLM."""
+         try:
+             from memvcs.core.llm import get_llm_provider
+         except ImportError:
+             logger.warning("LLM provider not available, using template")
+             return self._generate_template(observations)
+
+         provider = get_llm_provider()
+         if not provider:
+             return self._generate_template(observations)
+
+         # Build observation summary for prompt
+         obs_text = "\n".join(
+             f"- {obs.timestamp}: {obs.tool_name} - {obs.summary or 'No summary'}"
+             for obs in observations[:20]
+         )
+
+         prompt = f"""Generate a concise Git-style commit message for these agent observations:
+
+ {obs_text}
+
+ Requirements:
+ - Subject line: max 50 chars, imperative mood (e.g., "Implement", "Fix", "Update")
+ - Body: bullet points of key changes (optional if simple)
+ - Focus on WHAT was accomplished, not individual tool calls
+
+ Output only the commit message, no explanations."""
+
+         try:
+             response = provider.complete(prompt, model=self.llm_model, max_tokens=200)
+             return response.strip()
+         except Exception as e:
+             logger.warning(f"LLM call failed: {e}")
+             return self._generate_template(observations)
+
+
+ class ObservationDaemon:
+     """Background daemon that captures observations and auto-commits."""
+
+     def __init__(
+         self,
+         repo_root: Path,
+         commit_interval_seconds: int = 300,
+         max_buffer_size: int = 50,
+         use_llm_messages: bool = True,
+     ):
+         self.repo_root = Path(repo_root)
+         self.mem_dir = self.repo_root / ".mem"
+         self.commit_interval = commit_interval_seconds
+         self.max_buffer_size = max_buffer_size
+
+         self.extractor = ObservationExtractor()
+         self.stager = AutoStagingEngine(repo_root)
+         self.message_gen = CommitMessageGenerator(use_llm=use_llm_messages)
+
+         self.session: Optional[SessionState] = None
+         self._running = False
+         self._commit_timer: Optional[threading.Timer] = None
+         self._lock = threading.Lock()
+
+     # --- Session Management ---
+
+     def _session_file(self) -> Path:
+         return self.mem_dir / "daemon_session.json"
+
+     def _buffer_file(self) -> Path:
+         return self.mem_dir / "daemon_buffer.jsonl"
+
+     def _pid_file(self) -> Path:
+         return self.mem_dir / "daemon.pid"
+
+     def _load_session(self) -> Optional[SessionState]:
+         """Load existing session from disk if available."""
+         path = self._session_file()
+         if path.exists():
+             try:
+                 data = json.loads(path.read_text())
+                 return SessionState.from_dict(data)
+             except Exception as e:
+                 logger.warning(f"Failed to load session: {e}")
+         return None
+
+     def _save_session(self) -> None:
+         """Persist session state to disk."""
+         if not self.session:
+             return
+         path = self._session_file()
+         path.parent.mkdir(parents=True, exist_ok=True)
+         path.write_text(json.dumps(self.session.to_dict(), indent=2))
+
+     def _append_to_buffer(self, observation: Observation) -> None:
+         """Append observation to disk buffer for crash recovery."""
+         path = self._buffer_file()
+         path.parent.mkdir(parents=True, exist_ok=True)
+         with open(path, "a", encoding="utf-8") as f:
+             f.write(json.dumps(observation.to_dict()) + "\n")
+
+     def _clear_buffer(self) -> None:
+         """Clear the disk buffer after successful commit."""
+         path = self._buffer_file()
+         if path.exists():
+             path.unlink()
+
+     def _recover_buffer(self) -> List[Observation]:
+         """Recover observations from disk buffer."""
+         path = self._buffer_file()
+         if not path.exists():
+             return []
+
+         observations = []
+         for line in path.read_text().strip().split("\n"):
+             if line.strip():
+                 try:
+                     observations.append(Observation.from_dict(json.loads(line)))
+                 except Exception:
+                     pass
+         return observations
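
The JSONL buffer makes crash recovery a straight replay: one observation object per line, appended on capture, re-parsed on the next start(). A sketch of the scenario it guards against (paths assume the defaults above):

    # .mem/daemon_buffer.jsonl after two captures, before any commit:
    #   {"id": "...", "tool_name": "read_file", ...}
    #   {"id": "...", "tool_name": "run_command", ...}
    # If the process dies here, the next start() calls _recover_buffer(),
    # re-queues both observations, and skips any malformed lines.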
+
+     # --- Daemon Lifecycle ---
+
+     def start(self) -> None:
+         """Start the daemon."""
+         if self._running:
+             logger.warning("Daemon already running")
+             return
+
+         # Check if another daemon is running
+         pid_file = self._pid_file()
+         if pid_file.exists():
+             try:
+                 old_pid = int(pid_file.read_text().strip())
+                 # Check if process is still running
+                 try:
+                     os.kill(old_pid, 0)
+                     logger.error(f"Another daemon is running (PID: {old_pid})")
+                     return
+                 except OSError:
+                     pass  # Process not running, continue
+             except Exception:
+                 pass
+
+         # Write PID file
+         pid_file.parent.mkdir(parents=True, exist_ok=True)
+         pid_file.write_text(str(os.getpid()))
+
+         # Load or create session
+         self.session = self._load_session()
+         if not self.session:
+             self.session = SessionState(
+                 session_id=str(uuid.uuid4()),
+                 started_at=datetime.now(timezone.utc).isoformat(),
+             )
+
+         # Recover any buffered observations
+         recovered = self._recover_buffer()
+         if recovered:
+             logger.info(f"Recovered {len(recovered)} observations from buffer")
+             self.session.observations.extend(recovered)
+
+         self._running = True
+         self._save_session()
+         self._start_commit_timer()
+
+         logger.info(
+             f"Daemon started (session: {self.session.session_id[:8]}, "
+             f"observations: {len(self.session.observations)})"
+         )
+
+     def stop(self) -> None:
+         """Stop the daemon gracefully."""
+         if not self._running:
+             return
+
+         logger.info("Stopping daemon...")
+         self._running = False
+
+         # Cancel commit timer
+         if self._commit_timer:
+             self._commit_timer.cancel()
+
+         # Final commit if there are pending observations
+         if self.session and self.session.observations:
+             self._commit_observations()
+
+         # Clean up
+         self._save_session()
+         pid_file = self._pid_file()
+         if pid_file.exists():
+             pid_file.unlink()
+
+         logger.info("Daemon stopped")
+
+     def _start_commit_timer(self) -> None:
+         """Start the periodic commit timer."""
+         if not self._running:
+             return
+
+         self._commit_timer = threading.Timer(
+             self.commit_interval,
+             self._on_commit_timer,
+         )
+         self._commit_timer.daemon = True
+         self._commit_timer.start()
+
+     def _on_commit_timer(self) -> None:
+         """Timer callback for periodic commits."""
+         if not self._running:
+             return
+
+         with self._lock:
+             if self.session and self.session.observations:
+                 self._commit_observations()
+
+         self._start_commit_timer()
+
+     # --- Observation Handling ---
+
+     def add_observation(
+         self,
+         tool_name: str,
+         arguments: Dict[str, Any],
+         result: Optional[str] = None,
+     ) -> Optional[str]:
+         """Add a new observation. Returns observation ID if captured."""
+         if not self._running or not self.session:
+             logger.warning("Daemon not running, observation not captured")
+             return None
+
+         observation = self.extractor.extract(tool_name, arguments, result)
+         if not observation:
+             return None
+
+         with self._lock:
+             # Stage the observation file
+             self.stager.stage_observation(observation)
+
+             # Add to session
+             self.session.observations.append(observation)
+             self._append_to_buffer(observation)
+             self._save_session()
+
+             # Auto-commit if buffer is full
+             if len(self.session.observations) >= self.max_buffer_size:
+                 self._commit_observations()
+
+         logger.debug(f"Captured: {observation.tool_name} ({observation.memory_type})")
+         return observation.id
+
+     def _commit_observations(self) -> Optional[str]:
+         """Commit all pending observations."""
+         if not self.session or not self.session.observations:
+             return None
+
+         try:
+             from memvcs.core.repository import Repository
+             from memvcs.core.audit import append_audit
+         except ImportError as e:
+             logger.error(f"Failed to import repository: {e}")
+             return None
+
+         try:
+             repo = Repository(self.repo_root)
+             if not repo.is_valid_repo():
+                 logger.error("Not a valid agmem repository")
+                 return None
+
+             # Stage all observation files
+             repo.stage_directory("")
+
+             # Generate commit message
+             message = self.message_gen.generate(self.session.observations)
+
+             # Commit with metadata
+             commit_hash = repo.commit(
+                 message,
+                 metadata={
+                     "daemon_session_id": self.session.session_id,
+                     "observation_count": len(self.session.observations),
+                     "auto_commit": True,
+                 },
+             )
+
+             # Log to audit trail
+             append_audit(
+                 self.mem_dir,
+                 "daemon_commit",
+                 {
+                     "session_id": self.session.session_id,
+                     "commit_hash": commit_hash,
+                     "observations_count": len(self.session.observations),
+                 },
+             )
+
+             logger.info(
+                 f"Committed {len(self.session.observations)} observations: {commit_hash[:8]}"
+             )
+
+             # Clear buffer and observations
+             self._clear_buffer()
+             self.session.observations = []
+             self.session.last_commit_at = datetime.now(timezone.utc).isoformat()
+             self.session.commit_count += 1
+             self._save_session()
+
+             return commit_hash
+
+         except Exception as e:
+             logger.error(f"Commit failed: {e}")
+             return None
+
+     # --- Status and Info ---
+
+     def get_status(self) -> Dict[str, Any]:
+         """Get daemon status information."""
+         return {
+             "running": self._running,
+             "session_id": self.session.session_id if self.session else None,
+             "started_at": self.session.started_at if self.session else None,
+             "pending_observations": len(self.session.observations) if self.session else 0,
+             "commit_count": self.session.commit_count if self.session else 0,
+             "last_commit_at": self.session.last_commit_at if self.session else None,
+             "commit_interval_seconds": self.commit_interval,
+             "max_buffer_size": self.max_buffer_size,
+         }
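
A status snapshot from a running daemon might look like this (values illustrative):

    {
        "running": True,
        "session_id": "b6a0c3d2-...",
        "started_at": "2024-01-01T12:00:00+00:00",
        "pending_observations": 7,
        "commit_count": 3,
        "last_commit_at": "2024-01-01T12:45:00+00:00",
        "commit_interval_seconds": 300,
        "max_buffer_size": 50,
    }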
+
+
+ # --- Daemon Process Entry Point ---
+
+
+ def _handle_signals(daemon: ObservationDaemon) -> None:
+     """Set up signal handlers for graceful shutdown."""
+
+     def handler(signum: int, frame: Any) -> None:
+         logger.info(f"Received signal {signum}, shutting down...")
+         daemon.stop()
+         sys.exit(0)
+
+     signal.signal(signal.SIGTERM, handler)
+     signal.signal(signal.SIGINT, handler)
+
+
+ def run_daemon(
+     repo_root: Path,
+     commit_interval: int = 300,
+     max_buffer: int = 50,
+     use_llm: bool = True,
+     foreground: bool = False,
+ ) -> int:
+     """Run the observation daemon."""
+     logging.basicConfig(
+         level=logging.INFO,
+         format="%(asctime)s %(name)s: %(message)s",
+         stream=sys.stderr,
+     )
+
+     daemon = ObservationDaemon(
+         repo_root=repo_root,
+         commit_interval_seconds=commit_interval,
+         max_buffer_size=max_buffer,
+         use_llm_messages=use_llm,
+     )
+
+     _handle_signals(daemon)
+     daemon.start()
+
+     if foreground:
+         try:
+             while daemon._running:
+                 time.sleep(1)
+         except KeyboardInterrupt:
+             daemon.stop()
+
+     return 0
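
A minimal sketch of running the daemon in the foreground; Ctrl-C or SIGTERM takes the graceful stop-and-commit path:

    from pathlib import Path
    from memvcs.core.daemon import run_daemon

    run_daemon(Path("./memory"), commit_interval=120, foreground=True)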
+
+
+ # --- Public API for MCP integration ---
+
+
+ _daemon_instance: Optional[ObservationDaemon] = None
+
+
+ def get_daemon() -> Optional[ObservationDaemon]:
+     """Get the global daemon instance."""
+     return _daemon_instance
+
+
+ def initialize_daemon(repo_root: Path, **kwargs: Any) -> ObservationDaemon:
+     """Initialize and return the global daemon instance."""
+     global _daemon_instance
+     if _daemon_instance is None:
+         _daemon_instance = ObservationDaemon(repo_root, **kwargs)
+     return _daemon_instance
+
+
+ def capture_observation(
+     tool_name: str,
+     arguments: Dict[str, Any],
+     result: Optional[str] = None,
+ ) -> Optional[str]:
+     """Capture an observation via the global daemon. Returns observation ID if captured."""
+     daemon = get_daemon()
+     if daemon and daemon._running:
+         return daemon.add_observation(tool_name, arguments, result)
+     return None
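
One plausible way to wire this into an MCP server is to wrap the server's tool dispatch; dispatch_tool below is a hypothetical stand-in for that server-specific hook:

    from pathlib import Path
    from memvcs.core.daemon import initialize_daemon, capture_observation

    daemon = initialize_daemon(Path("./memory"))
    daemon.start()

    def observed_dispatch(tool_name, arguments):
        result = dispatch_tool(tool_name, arguments)  # hypothetical server hook
        capture_observation(tool_name, arguments, result=str(result))
        return result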