delimit-cli 4.1.44 → 4.1.48

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,445 @@
1
+ """Cross-session MCP hot reload (LED-799).
2
+
3
+ Solves the pain where one Claude session edits ai/*.py and other sessions
4
+ have to restart the MCP server to pick up the change. There are three
5
+ distinct cases this module handles:
6
+
7
+ 1. **Edited helper module** (e.g. ai/content_intel.py changed):
8
+ importlib.reload() the module so tools that lazily `from ai.X import Y`
9
+ inside their function body pick up the new code on the next call.
10
+
11
+ 2. **New helper module** (e.g. ai/foo.py added by another session):
12
+ importlib.import_module() to bring it into sys.modules so subsequent
13
+ lazy imports inside tool bodies succeed.
14
+
15
+ 3. **New @mcp.tool() decoration** in a freshly added module (ai/tools/*.py):
16
+ walk the module globals for fastmcp.tools.tool.FunctionTool instances
17
+ and add them to the live FastMCP tool_manager via add_tool(). New tool
18
+ files become callable without a server restart.
19
+
20
+ Out of scope (still requires restart):
21
+ - Edits to ai/server.py itself. That module is too large, has too many
22
+ side effects on import, and reloading it would create a NEW FastMCP
23
+ instance disconnected from the running server. Convention: put NEW
24
+ tools in ai/tools/<name>.py, not in ai/server.py.
25
+
26
+ Dead-letter behavior: every reload/import is wrapped in try/except. Failures
27
+ are logged to ~/.delimit/logs/hot_reload.jsonl and never crash the server.
28
+ """
29
+
30
+ from __future__ import annotations
31
+
32
+ import importlib
33
+ import json
34
+ import logging
35
+ import os
36
+ import sys
37
+ import threading
38
+ import time
39
+ import traceback
40
+ from datetime import datetime, timezone
41
+ from pathlib import Path
42
+ from typing import Any, Callable, Dict, List, Optional, Set
43
+
44
+ logger = logging.getLogger("delimit.ai.hot_reload")
45
+
46
+ LOG_DIR = Path.home() / ".delimit" / "logs"
47
+ LOG_FILE = LOG_DIR / "hot_reload.jsonl"
48
+
49
+ # Modules whose reload would do more harm than good. server.py defines the
50
+ # live FastMCP instance — reloading it would create a fresh disconnected
51
+ # instance. Tests confirm reload of these modules creates duplicate state.
52
+ RELOAD_DENY_LIST: Set[str] = {
53
+ "ai.server",
54
+ "ai.hot_reload", # don't reload self
55
+ "ai", # the package itself
56
+ }
57
+
58
+
59
+ # ── logging ──────────────────────────────────────────────────────────
60
+
61
+
62
def _log(event: Dict[str, Any]) -> None:
    """Append a structured event to the hot-reload audit log. Never raises.

    A UTC ISO-8601 timestamp and the current PID are merged into `event`
    before it is written as a single JSON line to LOG_FILE.
    """
    try:
        LOG_DIR.mkdir(parents=True, exist_ok=True)
        event = {
            **event,
            "timestamp": datetime.now(timezone.utc).isoformat(),
            "pid": os.getpid(),
        }
        with open(LOG_FILE, "a", encoding="utf-8") as f:
            # default=str keeps non-JSON-serializable payload values (Paths,
            # exceptions, ...) from raising TypeError out of the logger.
            f.write(json.dumps(event, default=str) + "\n")
    except Exception as e:  # noqa: BLE001 — "never raises" is the contract;
        # the old code caught only OSError, so json.dumps TypeErrors escaped.
        logger.debug("hot_reload log write failed: %s", e)
75
+
76
+
77
+ # ── tool re-registration ──────────────────────────────────────────────
78
+
79
+
80
+ def _is_function_tool(obj: Any) -> bool:
81
+ """True if `obj` is a fastmcp FunctionTool (registered tool)."""
82
+ cls = type(obj)
83
+ return cls.__module__.startswith("fastmcp.") and cls.__name__ == "FunctionTool"
84
+
85
+
86
def register_module_tools(mcp: Any, module: Any) -> List[str]:
    """Walk a module's globals and register every FunctionTool against the live mcp.

    Returns the list of tool keys registered. Existing tools with the same
    key are *replaced* — that lets edits to a tool's metadata or schema
    take effect without a restart.
    """
    if module is None or mcp is None:
        return []
    registered: List[str] = []
    try:
        manager = getattr(mcp, "_tool_manager", None)
        if manager is None or not hasattr(manager, "_tools"):
            # Not a FastMCP we know how to poke at — bail quietly.
            return []
        for attr_name, candidate in list(vars(module).items()):
            if not _is_function_tool(candidate):
                continue
            try:
                tool_key = getattr(candidate, "key", attr_name)
                # Overwrite any existing entry so schema edits take effect.
                manager._tools[tool_key] = candidate
                registered.append(tool_key)
            except Exception as e:
                _log({
                    "event": "tool_register_failed",
                    "module": module.__name__,
                    "name": attr_name,
                    "error": str(e),
                })
    except Exception as e:  # noqa: BLE001
        _log({
            "event": "register_module_tools_failed",
            "module": getattr(module, "__name__", "?"),
            "error": str(e),
            "traceback": traceback.format_exc(limit=3),
        })
    if registered:
        _log({
            "event": "tools_registered",
            "module": getattr(module, "__name__", "?"),
            "count": len(registered),
            "keys": registered,
        })
    return registered
129
+
130
+
131
def reload_module(mcp: Any, module_name: str) -> Dict[str, Any]:
    """Reload an existing module and re-register any tools it defines.

    Returns a status dict with the module name, whether the reload succeeded,
    and the list of tool keys registered. Reload failures keep the previous
    module in place (importlib.reload either replaces atomically or raises).
    """
    if module_name in RELOAD_DENY_LIST:
        return {"module": module_name, "ok": False, "skipped": "deny_list"}
    if module_name not in sys.modules:
        return {"module": module_name, "ok": False, "skipped": "not_loaded"}
    try:
        refreshed = importlib.reload(sys.modules[module_name])
        tool_keys = register_module_tools(mcp, refreshed)
    except Exception as e:  # noqa: BLE001
        _log({
            "event": "module_reload_failed",
            "module": module_name,
            "error": str(e),
            "traceback": traceback.format_exc(limit=5),
        })
        return {"module": module_name, "ok": False, "error": str(e)}
    _log({
        "event": "module_reloaded",
        "module": module_name,
        "tools_registered": tool_keys,
    })
    return {"module": module_name, "ok": True, "tools_registered": tool_keys}
159
+
160
+
161
def import_new_module(
    mcp: Any,
    file_path: Path,
    package_root: Path,
    package_prefix: str = "ai",
) -> Dict[str, Any]:
    """Import a freshly added file under the watched package and register its tools.

    `file_path` must live under `package_root`. The module name is derived
    from the relative path: ai/tools/foo.py → ai.tools.foo.
    """
    try:
        relative = file_path.relative_to(package_root)
    except ValueError:
        return {"file": str(file_path), "ok": False, "error": "outside_package_root"}

    segments = list(relative.with_suffix("").parts)
    if not segments:
        return {"file": str(file_path), "ok": False, "error": "invalid_path"}
    # ai/tools/__init__.py names the package ai.tools, not ai.tools.__init__.
    if segments[-1] == "__init__":
        segments.pop()
    # The package_root is the directory CONTAINING the package (e.g.
    # delimit-gateway/), so the relative path usually already starts with
    # the package name; prepend the prefix only when it does not.
    if package_prefix and (not segments or segments[0] != package_prefix):
        segments.insert(0, package_prefix)
    module_name = ".".join(segments)

    if module_name in RELOAD_DENY_LIST:
        return {"file": str(file_path), "module": module_name, "ok": False, "skipped": "deny_list"}

    try:
        # Critical: drop cached finders so a new file inside an already-imported
        # package becomes visible. Without this, importlib's package finder
        # uses a stale directory listing.
        importlib.invalidate_caches()
        if module_name in sys.modules:
            action = "reloaded"
            module = importlib.reload(sys.modules[module_name])
        else:
            action = "imported"
            module = importlib.import_module(module_name)
        tools = register_module_tools(mcp, module)
        _log({
            "event": "new_module_handled",
            "module": module_name,
            "action": action,
            "tools_registered": tools,
        })
        return {
            "file": str(file_path),
            "module": module_name,
            "action": action,
            "ok": True,
            "tools_registered": tools,
        }
    except Exception as e:  # noqa: BLE001
        _log({
            "event": "new_module_import_failed",
            "module": module_name,
            "error": str(e),
            "traceback": traceback.format_exc(limit=5),
        })
        return {
            "file": str(file_path),
            "module": module_name,
            "ok": False,
            "error": str(e),
        }
230
+
231
+
232
+ # ── file watcher ──────────────────────────────────────────────────────
233
+
234
+
235
class HotReloadWatcher:
    """Polling-based file watcher (no inotify dependency).

    Tracks mtimes for every .py file under `watch_dir`. On each tick:
    - New files trigger import_new_module().
    - Changed files trigger reload_module() (unless on the deny list).
    - Deleted files are noted in the log but no action is taken (the
      cached sys.modules entry stays — that's safer than fighting against
      another session that may be mid-edit).
    """

    def __init__(
        self,
        mcp: Any,
        watch_dir: Path,
        package_root: Path,
        package_prefix: str = "ai",
        interval: float = 2.0,
    ) -> None:
        self.mcp = mcp
        self.watch_dir = Path(watch_dir)
        self.package_root = Path(package_root)
        self.package_prefix = package_prefix
        self.interval = interval
        # str(path) -> last observed st_mtime
        self._mtimes: Dict[str, float] = {}
        self._stop = threading.Event()
        self._thread: Optional[threading.Thread] = None
        self._snapshot_initial()

    def _snapshot_initial(self) -> None:
        """Record current mtimes so the first tick doesn't reload everything."""
        for path in self.watch_dir.rglob("*.py"):
            try:
                self._mtimes[str(path)] = path.stat().st_mtime
            except OSError:
                pass

    def tick(self) -> Dict[str, Any]:
        """Run a single scan pass.

        Returns a dict with the per-file results for "new" and "changed",
        a count of "deleted" files, and an "errors" count.
        """
        new_files: List[Path] = []
        changed_files: List[Path] = []
        seen: Set[str] = set()

        try:
            for path in self.watch_dir.rglob("*.py"):
                key = str(path)
                seen.add(key)
                try:
                    mtime = path.stat().st_mtime
                except OSError:
                    continue
                prev = self._mtimes.get(key)
                if prev is None:
                    new_files.append(path)
                elif mtime > prev:
                    changed_files.append(path)
                self._mtimes[key] = mtime
        except OSError as e:
            _log({"event": "watch_scan_error", "error": str(e)})
            # Same value types as the success path (previously this returned
            # ints for "new"/"changed" while success returned lists).
            return {"new": [], "changed": [], "deleted": 0, "errors": 1}

        # BUG FIX: `seen` was collected but never used, so deletions were
        # never "noted in the log" as the class docstring promises. Log them
        # and drop the stale mtime so a re-created file is picked up as new.
        # The sys.modules entry is deliberately left alone.
        deleted = set(self._mtimes) - seen
        for key in deleted:
            del self._mtimes[key]
            _log({"event": "file_deleted", "path": key})

        results: Dict[str, Any] = {
            "new": [],
            "changed": [],
            "deleted": len(deleted),
            "errors": 0,
        }
        for path in new_files:
            r = import_new_module(self.mcp, path, self.package_root, self.package_prefix)
            results["new"].append(r)
            if not r.get("ok"):
                results["errors"] += 1

        for path in changed_files:
            module_name = self._path_to_module(path)
            if module_name is None:
                continue
            if module_name in RELOAD_DENY_LIST:
                continue
            r = reload_module(self.mcp, module_name)
            results["changed"].append(r)
            if not r.get("ok") and r.get("skipped") is None:
                results["errors"] += 1

        return results

    def _path_to_module(self, path: Path) -> Optional[str]:
        """Derive a dotted module name from a file path, or None if outside
        the package root. Mirrors import_new_module's name derivation."""
        try:
            rel = path.relative_to(self.package_root)
        except ValueError:
            return None
        parts = list(rel.with_suffix("").parts)
        if not parts:
            return None
        if parts[-1] == "__init__":
            parts = parts[:-1]
        if self.package_prefix and (not parts or parts[0] != self.package_prefix):
            parts = [self.package_prefix] + parts
        return ".".join(parts)

    def _loop(self) -> None:
        """Background-thread body: tick, swallow+log errors, wait, repeat."""
        _log({"event": "watcher_started", "watch_dir": str(self.watch_dir), "interval": self.interval})
        while not self._stop.is_set():
            try:
                self.tick()
            except Exception as e:  # noqa: BLE001
                _log({
                    "event": "watcher_tick_error",
                    "error": str(e),
                    "traceback": traceback.format_exc(limit=3),
                })
            # Event.wait doubles as an interruptible sleep so stop() is prompt.
            self._stop.wait(timeout=self.interval)
        _log({"event": "watcher_stopped"})

    def start(self) -> None:
        """Start the daemon watcher thread. No-op if already running."""
        if self._thread and self._thread.is_alive():
            return
        self._stop.clear()
        self._thread = threading.Thread(
            target=self._loop, name="delimit-hot-reload", daemon=True
        )
        self._thread.start()

    def stop(self) -> None:
        """Signal the loop to exit and wait (bounded) for the thread to die."""
        self._stop.set()
        if self._thread:
            self._thread.join(timeout=5)
357
+
358
+
359
+ # ── module-level singleton + bootstrap helper ─────────────────────────
360
+
361
+
362
# Process-wide watcher singleton. All reads/writes go through
# _singleton_lock so concurrent start/stop calls stay idempotent.
_singleton: Optional[HotReloadWatcher] = None
_singleton_lock = threading.Lock()
364
+
365
+
366
def start_hot_reload(
    mcp: Any,
    watch_dir: Optional[Path] = None,
    package_root: Optional[Path] = None,
    interval: float = 2.0,
) -> Dict[str, Any]:
    """Start the global hot-reload watcher. Idempotent.

    Args:
        mcp: The live FastMCP instance from ai/server.py.
        watch_dir: Directory to watch. Defaults to the directory containing
            ai/server.py (i.e. the ai/ package directory).
        package_root: Directory whose first child is the package. Used to
            derive module names from file paths. Defaults to the parent of
            watch_dir.
        interval: Poll interval in seconds. Default 2.0.

    Returns a status dict. Will not raise — failures are logged.
    """
    global _singleton
    with _singleton_lock:
        if _singleton is not None:
            return {"status": "already_running"}
        try:
            if watch_dir is None:
                watch_dir = Path(__file__).parent
            if package_root is None:
                package_root = Path(watch_dir).parent
            watcher = HotReloadWatcher(
                mcp=mcp,
                watch_dir=Path(watch_dir),
                package_root=Path(package_root),
                interval=interval,
            )
            watcher.start()
            # BUG FIX: publish the singleton only after construction and
            # start() succeed. Previously _singleton was assigned first, so
            # a failure left a dead watcher behind and every retry returned
            # "already_running".
            _singleton = watcher
            _log({
                "event": "hot_reload_started",
                "watch_dir": str(watch_dir),
                "package_root": str(package_root),
                "interval": interval,
            })
            return {
                "status": "started",
                "watch_dir": str(watch_dir),
                "package_root": str(package_root),
                "interval": interval,
            }
        except Exception as e:  # noqa: BLE001
            _log({
                "event": "hot_reload_start_failed",
                "error": str(e),
                "traceback": traceback.format_exc(limit=5),
            })
            return {"status": "failed", "error": str(e)}
420
+
421
+
422
def stop_hot_reload() -> Dict[str, Any]:
    """Stop the global watcher. Idempotent."""
    global _singleton
    with _singleton_lock:
        watcher = _singleton
        if watcher is None:
            return {"status": "not_running"}
        watcher.stop()
        _singleton = None
        _log({"event": "hot_reload_stopped_via_api"})
        return {"status": "stopped"}
432
+
433
+
434
def hot_reload_status() -> Dict[str, Any]:
    """Inspect the watcher state."""
    with _singleton_lock:
        watcher = _singleton
        if watcher is None:
            return {"running": False}
        return {
            "running": True,
            "watch_dir": str(watcher.watch_dir),
            "package_root": str(watcher.package_root),
            "interval": watcher.interval,
            "tracked_files": len(watcher._mtimes),
        }
@@ -0,0 +1,217 @@
1
+ #!/usr/bin/env python3
2
+ """
3
+ Standalone runner for the Delimit inbox polling daemon.
4
+
5
+ Designed for use with systemd or manual invocation. Adds:
6
+ - Structured logging with timestamps
7
+ - Graceful SIGTERM handling for clean systemd stop
8
+ - PID file to prevent duplicate instances
9
+ - Startup validation of required configuration
10
+
11
+ Usage:
12
+ # Via systemd (see deploy/inbox-daemon.service)
13
+ systemctl start delimit-inbox-daemon
14
+
15
+ # Manual foreground run
16
+ python3 ai/inbox_daemon_runner.py
17
+
18
+ # Single poll cycle (for testing)
19
+ python3 ai/inbox_daemon_runner.py --once
20
+
21
+ Environment variables:
22
+ DELIMIT_SMTP_PASS Required. IMAP/SMTP password.
23
+ DELIMIT_INBOX_POLL_INTERVAL Poll interval in seconds (default: 300).
24
+ DELIMIT_HOME Delimit config directory (default: ~/.delimit).
25
+ PYTHONPATH Must include the gateway root for ai.* imports.
26
+ """
27
+
28
+ import logging
29
+ import os
30
+ import signal
31
+ import sys
32
+ import time
33
+ from datetime import datetime, timezone
34
+ from pathlib import Path
35
+
36
+ # Ensure the gateway root is on sys.path so ai.* imports work
37
+ _gateway_root = Path(__file__).resolve().parent.parent
38
+ if str(_gateway_root) not in sys.path:
39
+ sys.path.insert(0, str(_gateway_root))
40
+
41
+ # PID file to prevent duplicate instances
42
+ PID_DIR = Path(os.environ.get("DELIMIT_HOME", Path.home() / ".delimit"))
43
+ PID_FILE = PID_DIR / "inbox-daemon.pid"
44
+
45
+
46
+ def _setup_logging() -> logging.Logger:
47
+ """Configure structured logging for journald and console."""
48
+ log_format = "%(asctime)s [%(name)s] %(levelname)s: %(message)s"
49
+ logging.basicConfig(
50
+ level=logging.INFO,
51
+ format=log_format,
52
+ stream=sys.stdout,
53
+ )
54
+ # Suppress noisy libraries
55
+ logging.getLogger("urllib3").setLevel(logging.WARNING)
56
+ logging.getLogger("imaplib").setLevel(logging.WARNING)
57
+ return logging.getLogger("delimit.inbox_daemon_runner")
58
+
59
+
60
def _write_pid() -> None:
    """Write PID file. Check for stale processes first.

    Exits with status 1 when another live daemon owns the PID file. A PID
    file pointing at a dead process (or containing garbage) is treated as
    stale and overwritten.
    """
    PID_DIR.mkdir(parents=True, exist_ok=True)

    if PID_FILE.exists():
        old_pid = None
        alive = False
        try:
            old_pid = int(PID_FILE.read_text().strip())
            # Signal 0 probes for process existence without delivering anything.
            os.kill(old_pid, 0)
            alive = True
        except (ValueError, ProcessLookupError):
            # Garbage content or dead process -- stale, safe to overwrite.
            alive = False
        except PermissionError:
            # BUG FIX: EPERM from kill(pid, 0) means the process EXISTS but
            # belongs to another user. The old code lumped PermissionError in
            # with the stale cases, allowing a duplicate daemon to start.
            alive = True
        except OSError:
            # Unknown probe failure -- keep the old best-effort behavior and
            # treat it as stale rather than refuse to start.
            alive = False
        if alive:
            print(
                f"ERROR: Another inbox daemon is running (PID {old_pid}). "
                f"Remove {PID_FILE} if stale.",
                file=sys.stderr,
            )
            sys.exit(1)

    PID_FILE.write_text(str(os.getpid()))
83
+
84
+
85
def _remove_pid() -> None:
    """Remove PID file on clean shutdown."""
    try:
        if not PID_FILE.exists():
            return
        # Only delete the file if it still names this process — another
        # instance may have legitimately replaced it since we wrote it.
        if PID_FILE.read_text().strip() == str(os.getpid()):
            PID_FILE.unlink()
    except OSError:
        pass
94
+
95
+
96
+ def _validate_config(logger: logging.Logger) -> bool:
97
+ """Validate required configuration before starting the daemon."""
98
+ ok = True
99
+
100
+ if not os.environ.get("DELIMIT_SMTP_PASS"):
101
+ # Check if the notify module can load credentials from config
102
+ try:
103
+ from ai.notify import _load_smtp_account, IMAP_USER
104
+ if IMAP_USER:
105
+ account = _load_smtp_account(IMAP_USER)
106
+ if account and (account.get("pass") or account.get("password")):
107
+ logger.info("SMTP credentials loaded from config for %s", IMAP_USER)
108
+ else:
109
+ logger.error(
110
+ "DELIMIT_SMTP_PASS not set and no credentials found in config for %s",
111
+ IMAP_USER,
112
+ )
113
+ ok = False
114
+ else:
115
+ logger.error("DELIMIT_SMTP_PASS not set and IMAP_USER not configured")
116
+ ok = False
117
+ except ImportError:
118
+ logger.error("DELIMIT_SMTP_PASS not set and ai.notify module not importable")
119
+ ok = False
120
+ else:
121
+ logger.info("SMTP credentials provided via environment")
122
+
123
+ return ok
124
+
125
+
126
def main() -> None:
    """Entry point: parse args, validate config, then run the daemon.

    --once runs a single poll cycle and exits. Otherwise writes a PID file,
    installs SIGTERM/SIGINT handlers for graceful shutdown, and blocks in
    the daemon loop until the stop event is set.
    """
    import argparse

    parser = argparse.ArgumentParser(
        description="Delimit inbox daemon runner -- persistent email governance polling",
    )
    parser.add_argument(
        "--once",
        action="store_true",
        help="Run a single poll cycle and exit",
    )
    parser.add_argument(
        "--interval",
        type=int,
        default=None,
        help="Override poll interval in seconds",
    )
    args = parser.parse_args()

    logger = _setup_logging()
    logger.info(
        "Delimit inbox daemon runner starting (PID %d, Python %s)",
        os.getpid(),
        sys.version.split()[0],
    )

    # Validate config before doing anything else
    if not _validate_config(logger):
        logger.error("Configuration validation failed. Exiting.")
        sys.exit(1)

    # Import the daemon module (after PYTHONPATH is set up).
    # BUG FIX: `import ai.inbox_daemon` previously ran only inside the
    # `--interval` branch, but `ai.inbox_daemon.POLL_INTERVAL` is read
    # unconditionally below — and `from ai.inbox_daemon import ...` does NOT
    # bind the name `ai`. Without --interval this raised NameError at the
    # loop-entry log line. Import the module unconditionally instead.
    import ai.inbox_daemon
    from ai.inbox_daemon import (
        _daemon_state,
        _daemon_loop,
        poll_once,
    )

    # Override poll interval if requested (module attribute, so the daemon
    # loop sees the new value)
    if args.interval is not None:
        ai.inbox_daemon.POLL_INTERVAL = args.interval
        logger.info("Poll interval overridden to %d seconds", args.interval)

    # Single-shot mode
    if args.once:
        logger.info("Running single poll cycle (--once mode)")
        result = poll_once()
        if "error" in result:
            logger.error("Poll failed: %s", result["error"])
            sys.exit(1)
        logger.info(
            "Poll complete: %d processed, %d forwarded",
            result.get("processed", 0),
            result.get("forwarded", 0),
        )
        return

    # Write PID file (only for long-running mode)
    _write_pid()

    # Graceful shutdown handler
    def _handle_signal(signum, frame):
        sig_name = signal.Signals(signum).name
        logger.info("Received %s -- initiating graceful shutdown", sig_name)
        _daemon_state._stop_event.set()

    signal.signal(signal.SIGTERM, _handle_signal)
    signal.signal(signal.SIGINT, _handle_signal)

    # Start the daemon loop (blocks until stop event)
    logger.info(
        "Inbox daemon entering main loop (poll interval: %ds)",
        ai.inbox_daemon.POLL_INTERVAL,
    )
    _daemon_state.running = True
    _daemon_state._stop_event.clear()

    try:
        _daemon_loop()
    except Exception as e:
        logger.critical("Daemon loop crashed: %s", e, exc_info=True)
        sys.exit(1)
    finally:
        _daemon_state.running = False
        _remove_pid()
        logger.info("Inbox daemon runner exiting cleanly")
214
+
215
+
216
# Script entry point: run the daemon runner when invoked directly
# (systemd ExecStart or `python3 ai/inbox_daemon_runner.py`).
if __name__ == "__main__":
    main()