gobby-0.2.8-py3-none-any.whl → gobby-0.2.9-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (63)
  1. gobby/__init__.py +1 -1
  2. gobby/adapters/claude_code.py +3 -26
  3. gobby/app_context.py +59 -0
  4. gobby/cli/utils.py +5 -17
  5. gobby/config/features.py +0 -20
  6. gobby/config/tasks.py +4 -0
  7. gobby/hooks/event_handlers/__init__.py +155 -0
  8. gobby/hooks/event_handlers/_agent.py +175 -0
  9. gobby/hooks/event_handlers/_base.py +87 -0
  10. gobby/hooks/event_handlers/_misc.py +66 -0
  11. gobby/hooks/event_handlers/_session.py +573 -0
  12. gobby/hooks/event_handlers/_tool.py +196 -0
  13. gobby/hooks/hook_manager.py +2 -0
  14. gobby/llm/claude.py +377 -42
  15. gobby/mcp_proxy/importer.py +4 -41
  16. gobby/mcp_proxy/manager.py +13 -3
  17. gobby/mcp_proxy/registries.py +14 -0
  18. gobby/mcp_proxy/services/recommendation.py +2 -28
  19. gobby/mcp_proxy/tools/artifacts.py +3 -3
  20. gobby/mcp_proxy/tools/task_readiness.py +27 -4
  21. gobby/mcp_proxy/tools/workflows/__init__.py +266 -0
  22. gobby/mcp_proxy/tools/workflows/_artifacts.py +225 -0
  23. gobby/mcp_proxy/tools/workflows/_import.py +112 -0
  24. gobby/mcp_proxy/tools/workflows/_lifecycle.py +321 -0
  25. gobby/mcp_proxy/tools/workflows/_query.py +207 -0
  26. gobby/mcp_proxy/tools/workflows/_resolution.py +78 -0
  27. gobby/mcp_proxy/tools/workflows/_terminal.py +139 -0
  28. gobby/memory/components/__init__.py +0 -0
  29. gobby/memory/components/ingestion.py +98 -0
  30. gobby/memory/components/search.py +108 -0
  31. gobby/memory/manager.py +16 -25
  32. gobby/paths.py +51 -0
  33. gobby/prompts/loader.py +1 -35
  34. gobby/runner.py +23 -10
  35. gobby/servers/http.py +186 -149
  36. gobby/servers/routes/admin.py +12 -0
  37. gobby/servers/routes/mcp/endpoints/execution.py +15 -7
  38. gobby/servers/routes/mcp/endpoints/registry.py +8 -8
  39. gobby/sessions/analyzer.py +2 -2
  40. gobby/skills/parser.py +23 -0
  41. gobby/skills/sync.py +5 -4
  42. gobby/storage/artifacts.py +19 -0
  43. gobby/storage/migrations.py +25 -2
  44. gobby/storage/skills.py +47 -7
  45. gobby/tasks/external_validator.py +4 -17
  46. gobby/tasks/validation.py +13 -87
  47. gobby/tools/summarizer.py +18 -51
  48. gobby/utils/status.py +13 -0
  49. gobby/workflows/actions.py +5 -0
  50. gobby/workflows/context_actions.py +21 -24
  51. gobby/workflows/enforcement/__init__.py +11 -1
  52. gobby/workflows/enforcement/blocking.py +96 -0
  53. gobby/workflows/enforcement/handlers.py +35 -1
  54. gobby/workflows/engine.py +6 -3
  55. gobby/workflows/lifecycle_evaluator.py +2 -1
  56. {gobby-0.2.8.dist-info → gobby-0.2.9.dist-info}/METADATA +1 -1
  57. {gobby-0.2.8.dist-info → gobby-0.2.9.dist-info}/RECORD +61 -45
  58. gobby/hooks/event_handlers.py +0 -1008
  59. gobby/mcp_proxy/tools/workflows.py +0 -1023
  60. {gobby-0.2.8.dist-info → gobby-0.2.9.dist-info}/WHEEL +0 -0
  61. {gobby-0.2.8.dist-info → gobby-0.2.9.dist-info}/entry_points.txt +0 -0
  62. {gobby-0.2.8.dist-info → gobby-0.2.9.dist-info}/licenses/LICENSE.md +0 -0
  63. {gobby-0.2.8.dist-info → gobby-0.2.9.dist-info}/top_level.txt +0 -0
gobby/mcp_proxy/tools/workflows/_resolution.py ADDED
@@ -0,0 +1,78 @@
+ """
+ Resolution utilities for workflow tools.
+
+ Provides functions to resolve session and task references from various
+ formats (#N, N, UUID, prefix) to canonical UUIDs.
+ """
+
+ import logging
+ from typing import Any
+
+ from gobby.storage.database import DatabaseProtocol
+ from gobby.storage.sessions import LocalSessionManager
+ from gobby.storage.tasks._id import resolve_task_reference
+ from gobby.storage.tasks._models import TaskNotFoundError
+ from gobby.utils.project_context import get_project_context
+
+ logger = logging.getLogger(__name__)
+
+
+ def resolve_session_id(session_manager: LocalSessionManager, ref: str) -> str:
+     """Resolve session reference (#N, N, UUID, or prefix) to UUID."""
+     project_ctx = get_project_context()
+     project_id = project_ctx.get("id") if project_ctx else None
+     return session_manager.resolve_session_reference(ref, project_id)
+
+
+ def resolve_session_task_value(
+     value: Any,
+     session_id: str | None,
+     session_manager: LocalSessionManager,
+     db: DatabaseProtocol,
+ ) -> Any:
+     """
+     Resolve a session_task value from seq_num reference (#N or N) to UUID.
+
+     This prevents repeated resolution failures in condition evaluation when
+     task_tree_complete() is called with a seq_num that requires project_id.
+
+     Args:
+         value: The value to potentially resolve (e.g., "#4424", "47", or a UUID)
+         session_id: Session ID to look up project_id
+         session_manager: Session manager for lookups
+         db: Database for task resolution
+
+     Returns:
+         Resolved UUID if value was a seq_num reference, otherwise original value
+     """
+     # Only process string values that look like seq_num references
+     if not isinstance(value, str):
+         return value
+
+     # Check if it's a seq_num reference (#N or plain N)
+     is_seq_ref = value.startswith("#") or value.isdigit()
+     if not is_seq_ref:
+         return value
+
+     # Need session to get project_id
+     if not session_id:
+         logger.warning(f"Cannot resolve task reference '{value}': no session_id provided")
+         return value
+
+     # Get project_id from session
+     session = session_manager.get(session_id)
+     if not session or not session.project_id:
+         logger.warning(f"Cannot resolve task reference '{value}': session has no project_id")
+         return value
+
+     # Resolve the reference
+     try:
+         resolved = resolve_task_reference(db, value, session.project_id)
+         logger.debug(f"Resolved session_task '{value}' to UUID '{resolved}'")
+         return resolved
+     except TaskNotFoundError as e:
+         logger.warning(f"Could not resolve task reference '{value}': {e}")
+         return value
+     except Exception as e:
+         logger.warning(f"Unexpected error resolving task reference '{value}': {e}")
+         return value
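A minimal usage sketch of the two helpers above. The session_manager and db objects are assumed to be the storage-layer instances the surrounding MCP workflow tools already hold; only the two functions come from this hunk.

from gobby.mcp_proxy.tools.workflows._resolution import (
    resolve_session_id,
    resolve_session_task_value,
)

# session_manager and db are assumed to be provided by the tool registry / app context.
session_uuid = resolve_session_id(session_manager, "#12")

# Normalize a "#N" / "N" task reference before condition evaluation;
# UUIDs and non-string values pass through unchanged.
task_ref = resolve_session_task_value(
    value="#4424",
    session_id=session_uuid,
    session_manager=session_manager,
    db=db,
)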
gobby/mcp_proxy/tools/workflows/_terminal.py ADDED
@@ -0,0 +1,139 @@
+ """
+ Terminal tools for workflows.
+ """
+
+ import asyncio
+ import logging
+ import os
+ import stat
+ import subprocess  # nosec B404
+ from pathlib import Path
+ from typing import Any
+
+ from gobby.paths import get_install_dir
+
+ logger = logging.getLogger(__name__)
+
+
+ async def close_terminal(
+     signal: str = "TERM",
+     delay_ms: int = 0,
+ ) -> dict[str, Any]:
+     """
+     Close the current terminal by running the agent shutdown script.
+
+     This is for agent self-termination (meeseeks-style). The agent calls
+     this to close its own terminal window when done with its workflow.
+
+     The script is located at ~/.gobby/scripts/agent_shutdown.sh and is
+     automatically rebuilt if missing. It handles different terminal types
+     (tmux, iTerm, Terminal.app, Ghostty, Kitty, WezTerm, etc.).
+
+     Args:
+         signal: Signal to use for shutdown (TERM, KILL, INT). Default: TERM.
+         delay_ms: Optional delay in milliseconds before shutdown. Default: 0.
+
+     Returns:
+         Dict with success status and message.
+     """
+     # Script location
+     gobby_dir = Path.home() / ".gobby"
+     scripts_dir = gobby_dir / "scripts"
+     script_path = scripts_dir / "agent_shutdown.sh"
+
+     # Source script from the install directory (single source of truth)
+     source_script_path = get_install_dir() / "shared" / "scripts" / "agent_shutdown.sh"
+
+     def get_script_version(script_content: str) -> str | None:
+         """Extract VERSION marker from script content."""
+         import re
+
+         match = re.search(r"^# VERSION:\s*(.+)$", script_content, re.MULTILINE)
+         return match.group(1).strip() if match else None
+
+     # Ensure directories exist and script is present/up-to-date
+     script_rebuilt = False
+     try:
+         scripts_dir.mkdir(parents=True, exist_ok=True)
+
+         # Read source script content
+         if source_script_path.exists():
+             source_content = source_script_path.read_text()
+             source_version = get_script_version(source_content)
+         else:
+             logger.warning(f"Source shutdown script not found at {source_script_path}")
+             source_content = None
+             source_version = None
+
+         # Check if installed script exists and compare versions
+         needs_rebuild = False
+         if not script_path.exists():
+             needs_rebuild = True
+         elif source_content:
+             installed_content = script_path.read_text()
+             installed_version = get_script_version(installed_content)
+             # Rebuild if versions differ or installed has no version marker
+             if installed_version != source_version:
+                 needs_rebuild = True
+                 logger.info(
+                     f"Shutdown script version mismatch: installed={installed_version}, source={source_version}"
+                 )
+
+         if needs_rebuild:
+             if not source_content:
+                 logger.error(
+                     f"Cannot rebuild shutdown script at {script_path}: "
+                     f"source script not found at {source_script_path}"
+                 )
+                 return {
+                     "success": False,
+                     "error": f"Source shutdown script not found at {source_script_path}",
+                 }
+             script_path.write_text(source_content)
+             # Make executable
+             script_path.chmod(script_path.stat().st_mode | stat.S_IXUSR | stat.S_IXGRP)
+             script_rebuilt = True
+             logger.info(f"Created/updated agent shutdown script at {script_path}")
+     except OSError as e:
+         return {
+             "success": False,
+             "error": f"Failed to create shutdown script: {e}",
+         }
+
+     # Validate signal
+     valid_signals = {"TERM", "KILL", "INT", "HUP", "QUIT"}
+     if signal.upper() not in valid_signals:
+         return {
+             "success": False,
+             "error": f"Invalid signal '{signal}'. Valid: {valid_signals}",
+         }
+
+     # Apply delay before launching script (non-blocking)
+     if delay_ms > 0:
+         await asyncio.sleep(delay_ms / 1000.0)
+
+     # Launch the script
+     try:
+         # Run in background - we don't wait for it since it kills our process
+         env = os.environ.copy()
+
+         subprocess.Popen(  # nosec B603 - script path is from gobby scripts directory
+             [str(script_path), signal.upper(), "0"],  # Delay already applied
+             env=env,
+             start_new_session=True,  # Detach from parent
+             stdout=subprocess.DEVNULL,
+             stderr=subprocess.DEVNULL,
+         )
+
+         return {
+             "success": True,
+             "message": "Shutdown script launched",
+             "script_path": str(script_path),
+             "script_rebuilt": script_rebuilt,
+             "signal": signal.upper(),
+         }
+     except OSError as e:
+         return {
+             "success": False,
+             "error": f"Failed to launch shutdown script: {e}",
+         }
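A hedged usage sketch for the new tool: an agent-side caller awaits close_terminal and inspects the status dict it returns. The MCP tool-registration plumbing that exposes this function is not part of this hunk.

import asyncio

from gobby.mcp_proxy.tools.workflows._terminal import close_terminal


async def shut_down_when_done() -> None:
    # Give the agent ~2 seconds to flush output before the shutdown script fires.
    result = await close_terminal(signal="TERM", delay_ms=2000)
    if not result["success"]:
        print(f"shutdown not launched: {result['error']}")
    else:
        print(f"launched {result['script_path']} (rebuilt={result['script_rebuilt']})")


asyncio.run(shut_down_when_done())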
gobby/memory/components/__init__.py ADDED
File without changes
gobby/memory/components/ingestion.py ADDED
@@ -0,0 +1,98 @@
+ """
+ Component for handling Memory Manager's multimodal ingestion logic.
+ """
+
+ from __future__ import annotations
+
+ import logging
+ from typing import TYPE_CHECKING
+
+ from gobby.memory.ingestion import MultimodalIngestor
+ from gobby.memory.protocol import MemoryBackendProtocol
+ from gobby.storage.memories import LocalMemoryManager, Memory
+
+ if TYPE_CHECKING:
+     from gobby.llm.service import LLMService
+
+ logger = logging.getLogger(__name__)
+
+
+ class IngestionService:
+     """Service for handling memory ingestion, particularly multimodal content."""
+
+     def __init__(
+         self,
+         storage: LocalMemoryManager,
+         backend: MemoryBackendProtocol,
+         llm_service: LLMService | None = None,
+     ):
+         self.storage = storage
+         self._backend = backend
+         self._llm_service = llm_service
+
+         self._multimodal_ingestor = MultimodalIngestor(
+             storage=storage,
+             backend=backend,
+             llm_service=llm_service,
+         )
+
+     @property
+     def llm_service(self) -> LLMService | None:
+         """Get the LLM service."""
+         return self._llm_service
+
+     @llm_service.setter
+     def llm_service(self, service: LLMService | None) -> None:
+         """Set the LLM service and propagate to ingestor."""
+         self._llm_service = service
+         self._multimodal_ingestor.llm_service = service
+
+     async def remember_with_image(
+         self,
+         image_path: str,
+         context: str | None = None,
+         memory_type: str = "fact",
+         importance: float = 0.5,
+         project_id: str | None = None,
+         source_type: str = "user",
+         source_session_id: str | None = None,
+         tags: list[str] | None = None,
+     ) -> Memory:
+         """
+         Store a memory with an image attachment.
+         """
+         return await self._multimodal_ingestor.remember_with_image(
+             image_path=image_path,
+             context=context,
+             memory_type=memory_type,
+             importance=importance,
+             project_id=project_id,
+             source_type=source_type,
+             source_session_id=source_session_id,
+             tags=tags,
+         )
+
+     async def remember_screenshot(
+         self,
+         screenshot_bytes: bytes,
+         context: str | None = None,
+         memory_type: str = "observation",
+         importance: float = 0.5,
+         project_id: str | None = None,
+         source_type: str = "user",
+         source_session_id: str | None = None,
+         tags: list[str] | None = None,
+     ) -> Memory:
+         """
+         Store a memory from raw screenshot bytes.
+         """
+         return await self._multimodal_ingestor.remember_screenshot(
+             screenshot_bytes=screenshot_bytes,
+             context=context,
+             memory_type=memory_type,
+             importance=importance,
+             project_id=project_id,
+             source_type=source_type,
+             source_session_id=source_session_id,
+             tags=tags,
+         )
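For illustration only, constructing the service directly looks roughly like the sketch below; in 0.2.9 it is normally built inside MemoryManager (see the manager.py hunk further down), and the storage, backend, and llm_service objects here are assumed to be the ones that manager already owns.

from gobby.memory.components.ingestion import IngestionService


async def capture(storage, backend, llm_service, png_bytes: bytes):
    # Assumed inputs: the same storage/backend/LLM objects MemoryManager holds.
    service = IngestionService(storage=storage, backend=backend, llm_service=None)
    service.llm_service = llm_service  # setter propagates to the underlying MultimodalIngestor

    return await service.remember_screenshot(
        screenshot_bytes=png_bytes,
        context="dashboard after deploy",
        tags=["screenshot", "deploy"],
    )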
gobby/memory/components/search.py ADDED
@@ -0,0 +1,108 @@
+ """
+ Component for handling Memory Manager's search and cross-referencing logic.
+ """
+
+ from __future__ import annotations
+
+ import logging
+ from typing import Any
+
+ from gobby.config.persistence import MemoryConfig
+ from gobby.memory.search.coordinator import SearchCoordinator
+ from gobby.memory.services.crossref import CrossrefService
+ from gobby.storage.database import DatabaseProtocol
+ from gobby.storage.memories import LocalMemoryManager, Memory
+
+ logger = logging.getLogger(__name__)
+
+
+ class SearchService:
+     """Service for handling memory search and cross-referencing."""
+
+     def __init__(
+         self,
+         storage: LocalMemoryManager,
+         config: MemoryConfig,
+         db: DatabaseProtocol,
+     ):
+         self.storage = storage
+         self.config = config
+
+         self._search_coordinator = SearchCoordinator(
+             storage=storage,
+             config=config,
+             db=db,
+         )
+
+         self._crossref_service = CrossrefService(
+             storage=storage,
+             config=config,
+             search_backend_getter=lambda: self._search_coordinator.search_backend,
+             ensure_fitted=self._search_coordinator.ensure_fitted,
+         )
+
+     @property
+     def backend(self) -> Any:
+         """Get the underlying search backend."""
+         return self._search_coordinator.search_backend
+
+     def ensure_fitted(self) -> None:
+         """Ensure the search backend is fitted with current memories."""
+         self._search_coordinator.ensure_fitted()
+
+     def mark_refit_needed(self) -> None:
+         """Mark that the search backend needs to be refitted."""
+         self._search_coordinator.mark_refit_needed()
+
+     def reindex(self) -> dict[str, Any]:
+         """Force rebuild of the search index."""
+         return self._search_coordinator.reindex()
+
+     def search(
+         self,
+         query: str,
+         project_id: str | None = None,
+         limit: int = 10,
+         min_importance: float | None = None,
+         search_mode: str | None = None,
+         tags_all: list[str] | None = None,
+         tags_any: list[str] | None = None,
+         tags_none: list[str] | None = None,
+     ) -> list[Memory]:
+         """Perform search using the configured search backend."""
+         return self._search_coordinator.search(
+             query=query,
+             project_id=project_id,
+             limit=limit,
+             min_importance=min_importance,
+             search_mode=search_mode,
+             tags_all=tags_all,
+             tags_any=tags_any,
+             tags_none=tags_none,
+         )
+
+     async def create_crossrefs(
+         self,
+         memory: Memory,
+         threshold: float | None = None,
+         max_links: int | None = None,
+     ) -> int:
+         """Find and link similar memories."""
+         return await self._crossref_service.create_crossrefs(
+             memory=memory,
+             threshold=threshold,
+             max_links=max_links,
+         )
+
+     async def get_related(
+         self,
+         memory_id: str,
+         limit: int = 5,
+         min_similarity: float = 0.0,
+     ) -> list[Memory]:
+         """Get memories linked to this one via cross-references."""
+         return await self._crossref_service.get_related(
+             memory_id=memory_id,
+             limit=limit,
+             min_similarity=min_similarity,
+         )
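A short sketch of the search-side facade. The storage, config, and db arguments are assumed to be the same objects MemoryManager already owns; the synchronous search call and the async cross-referencing calls shown are the ones defined in this hunk.

from gobby.memory.components.search import SearchService

search = SearchService(storage=storage, config=config, db=db)

hits = search.search(
    query="postgres connection pooling",
    project_id=project_id,
    limit=5,
    tags_any=["infra", "database"],
)

# Cross-referencing is async and would be awaited from an async context:
#     linked = await search.create_crossrefs(memory)
#     related = await search.get_related(memory.id, limit=5)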
gobby/memory/manager.py CHANGED
@@ -6,11 +6,10 @@ from typing import TYPE_CHECKING, Any
 
  from gobby.config.persistence import MemoryConfig
  from gobby.memory.backends import get_backend
+ from gobby.memory.components.ingestion import IngestionService
+ from gobby.memory.components.search import SearchService
  from gobby.memory.context import build_memory_context
- from gobby.memory.ingestion import MultimodalIngestor
  from gobby.memory.protocol import MemoryBackendProtocol
- from gobby.memory.search.coordinator import SearchCoordinator
- from gobby.memory.services.crossref import CrossrefService
  from gobby.storage.database import DatabaseProtocol
  from gobby.storage.memories import LocalMemoryManager, Memory
 
@@ -46,20 +45,13 @@ class MemoryManager:
          self.storage = LocalMemoryManager(db)
 
          # Initialize extracted components
-         self._search_coordinator = SearchCoordinator(
+         self._search_service = SearchService(
              storage=self.storage,
              config=config,
              db=db,
          )
 
-         self._crossref_service = CrossrefService(
-             storage=self.storage,
-             config=config,
-             search_backend_getter=lambda: self._search_coordinator.search_backend,
-             ensure_fitted=self._search_coordinator.ensure_fitted,
-         )
-
-         self._multimodal_ingestor = MultimodalIngestor(
+         self._ingestion_service = IngestionService(
              storage=self.storage,
              backend=self._backend,
             llm_service=llm_service,
@@ -68,14 +60,13 @@
      @property
      def llm_service(self) -> LLMService | None:
          """Get the LLM service for image description."""
-         return self._llm_service
+         return self._ingestion_service.llm_service
 
      @llm_service.setter
      def llm_service(self, service: LLMService | None) -> None:
          """Set the LLM service for image description."""
          self._llm_service = service
-         # Keep multimodal ingestor in sync
-         self._multimodal_ingestor.llm_service = service
+         self._ingestion_service.llm_service = service
 
      @property
      def search_backend(self) -> Any:
@@ -86,15 +77,15 @@
          - "tfidf" (default): Zero-dependency TF-IDF search
          - "text": Simple text substring matching
          """
-         return self._search_coordinator.search_backend
+         return self._search_service.backend
 
      def _ensure_search_backend_fitted(self) -> None:
          """Ensure the search backend is fitted with current memories."""
-         self._search_coordinator.ensure_fitted()
+         self._search_service.ensure_fitted()
 
      def mark_search_refit_needed(self) -> None:
          """Mark that the search backend needs to be refitted."""
-         self._search_coordinator.mark_refit_needed()
+         self._search_service.mark_refit_needed()
 
      def reindex_search(self) -> dict[str, Any]:
          """
@@ -109,7 +100,7 @@
          Returns:
              Dict with index statistics including memory_count, backend_type, etc.
          """
-         return self._search_coordinator.reindex()
+         return self._search_service.reindex()
 
      async def remember(
          self,
@@ -161,7 +152,7 @@
          # Auto cross-reference if enabled
          if getattr(self.config, "auto_crossref", False):
              try:
-                 await self._crossref_service.create_crossrefs(memory)
+                 await self._search_service.create_crossrefs(memory)
              except Exception as e:
                  # Don't fail the remember if crossref fails
                  logger.warning(f"Auto-crossref failed for {memory.id}: {e}")
@@ -202,7 +193,7 @@
          Raises:
              ValueError: If LLM service is not configured or image not found
          """
-         memory = await self._multimodal_ingestor.remember_with_image(
+         memory = await self._ingestion_service.remember_with_image(
              image_path=image_path,
              context=context,
              memory_type=memory_type,
@@ -249,7 +240,7 @@
          Raises:
              ValueError: If LLM service is not configured or screenshot bytes are empty
          """
-         memory = await self._multimodal_ingestor.remember_screenshot(
+         memory = await self._ingestion_service.remember_screenshot(
              screenshot_bytes=screenshot_bytes,
              context=context,
              memory_type=memory_type,
@@ -283,7 +274,7 @@
          Returns:
              Number of cross-references created
          """
-         return await self._crossref_service.create_crossrefs(
+         return await self._search_service.create_crossrefs(
              memory=memory,
              threshold=threshold,
              max_links=max_links,
@@ -306,7 +297,7 @@
          Returns:
              List of related Memory objects, sorted by similarity
          """
-         return await self._crossref_service.get_related(
+         return await self._search_service.get_related(
              memory_id=memory_id,
              limit=limit,
              min_similarity=min_similarity,
@@ -398,7 +389,7 @@
          if use_semantic is not None:
              logger.warning("use_semantic argument is deprecated and ignored")
 
-         return self._search_coordinator.search(
+         return self._search_service.search(
              query=query,
              project_id=project_id,
              limit=limit,
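The public MemoryManager surface is unchanged; the refactor only redirects internals to the two new components. A rough sketch of the mapping, where manager is assumed to be an already-constructed MemoryManager instance:

# Internal attributes renamed in this release (visible in the hunks above):
#   self._search_coordinator / self._crossref_service  ->  self._search_service
#   self._multimodal_ingestor                          ->  self._ingestion_service
#
# Facade methods keep their names and behavior, for example:
stats = manager.reindex_search()      # now delegates to SearchService.reindex()
backend = manager.search_backend      # now delegates to SearchService.backend
manager.mark_search_refit_needed()    # now delegates to SearchService.mark_refit_needed()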
gobby/paths.py ADDED
@@ -0,0 +1,51 @@
+ """
+ Core path utilities for Gobby package.
+
+ This module provides stable path resolution utilities that work in both
+ development (source) and installed (package) modes without CLI dependencies.
+ """
+
+ from pathlib import Path
+
+ __all__ = ["get_package_root", "get_install_dir"]
+
+
+ def get_package_root() -> Path:
+     """Get the root directory of the gobby package.
+
+     Returns:
+         Path to src/gobby/ (the package root directory)
+     """
+     import gobby
+
+     return Path(gobby.__file__).parent
+
+
+ def get_install_dir() -> Path:
+     """Get the gobby install directory.
+
+     Checks for source directory (development mode) first,
+     falls back to package directory. This handles both:
+     - Development: src/gobby/install/
+     - Installed package: <site-packages>/gobby/install/
+
+     Returns:
+         Path to the install directory
+     """
+     import gobby
+
+     package_install_dir = Path(gobby.__file__).parent / "install"
+
+     # Try to find source directory (project root) for development mode
+     current = Path(gobby.__file__).resolve()
+     source_install_dir = None
+
+     for parent in current.parents:
+         potential_source = parent / "src" / "gobby" / "install"
+         if potential_source.exists():
+             source_install_dir = potential_source
+             break
+
+     if source_install_dir and source_install_dir.exists():
+         return source_install_dir
+     return package_install_dir
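A minimal sketch of the intended use, mirroring how _terminal.py above locates the packaged shutdown script; the install/shared/scripts layout is taken from that hunk.

from gobby.paths import get_install_dir, get_package_root

package_root = get_package_root()  # src/gobby in a checkout, <site-packages>/gobby when installed
shutdown_script = get_install_dir() / "shared" / "scripts" / "agent_shutdown.sh"

if not shutdown_script.exists():
    # Development resolves to src/gobby/install/..., installed wheels to
    # <site-packages>/gobby/install/...
    raise FileNotFoundError(shutdown_script)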
gobby/prompts/loader.py CHANGED
@@ -12,7 +12,6 @@ Implements prompt loading with precedence:
 
  import logging
  import re
- from collections.abc import Callable
  from functools import lru_cache
  from pathlib import Path
  from typing import Any
@@ -66,21 +65,6 @@ class PromptLoader:
          # Template cache
          self._cache: dict[str, PromptTemplate] = {}
 
-         # Fallback registry for strangler fig pattern
-         self._fallbacks: dict[str, Callable[[], str]] = {}
-
-     def register_fallback(self, path: str, getter: Callable[[], str]) -> None:
-         """Register a Python constant fallback for a template path.
-
-         Used for strangler fig pattern - if template file doesn't exist,
-         fall back to the original Python constant.
-
-         Args:
-             path: Template path (e.g., "expansion/system")
-             getter: Callable that returns the fallback string
-         """
-         self._fallbacks[path] = getter
-
      def clear_cache(self) -> None:
          """Clear the template cache."""
          self._cache.clear()
@@ -162,19 +146,6 @@ class PromptLoader:
              logger.debug(f"Loaded prompt template '{path}' from {template_file}")
              return template
 
-         # Fall back to registered Python constant
-         if path in self._fallbacks:
-             fallback_content = self._fallbacks[path]()
-             template = PromptTemplate(
-                 name=path,
-                 description=f"Fallback for {path}",
-                 content=fallback_content,
-                 source_path=None,
-             )
-             self._cache[path] = template
-             logger.debug(f"Using fallback for prompt template '{path}'")
-             return template
-
          raise FileNotFoundError(f"Prompt template not found: {path}")
 
      def render(
@@ -280,7 +251,7 @@
          Returns:
              True if template exists (file or fallback)
          """
-         return self._find_template_file(path) is not None or path in self._fallbacks
+         return self._find_template_file(path) is not None
 
      def list_templates(self, category: str | None = None) -> list[str]:
          """List available template paths.
@@ -305,11 +276,6 @@
              if category is None or template_path.startswith(f"{category}/"):
                  templates.add(template_path)
 
-         # Add registered fallbacks
-         for fallback_path in self._fallbacks:
-             if category is None or fallback_path.startswith(f"{category}/"):
-                 templates.add(fallback_path)
-
          return sorted(templates)
 
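With the strangler-fig fallback registry removed, only file-backed templates exist. A hedged sketch of what that means for callers; the PromptLoader constructor arguments and the internal load method's name are not visible in this diff, so only the methods shown above are used here.

from gobby.prompts.loader import PromptLoader

loader = PromptLoader()  # assumed default construction; real arguments not shown in this hunk

# Only templates with files on disk are reported now; register_fallback() is gone.
for template_path in loader.list_templates(category="expansion"):
    print(template_path)

# A template without a file no longer falls back to a Python constant; the
# internal loading path raises FileNotFoundError instead, as shown in the
# hunk above.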