gobby 0.2.5__py3-none-any.whl → 0.2.7__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- gobby/__init__.py +1 -1
- gobby/adapters/__init__.py +2 -1
- gobby/adapters/claude_code.py +13 -4
- gobby/adapters/codex_impl/__init__.py +28 -0
- gobby/adapters/codex_impl/adapter.py +722 -0
- gobby/adapters/codex_impl/client.py +679 -0
- gobby/adapters/codex_impl/protocol.py +20 -0
- gobby/adapters/codex_impl/types.py +68 -0
- gobby/agents/definitions.py +11 -1
- gobby/agents/isolation.py +395 -0
- gobby/agents/runner.py +8 -0
- gobby/agents/sandbox.py +261 -0
- gobby/agents/spawn.py +42 -287
- gobby/agents/spawn_executor.py +385 -0
- gobby/agents/spawners/__init__.py +24 -0
- gobby/agents/spawners/command_builder.py +189 -0
- gobby/agents/spawners/embedded.py +21 -2
- gobby/agents/spawners/headless.py +21 -2
- gobby/agents/spawners/prompt_manager.py +125 -0
- gobby/cli/__init__.py +6 -0
- gobby/cli/clones.py +419 -0
- gobby/cli/conductor.py +266 -0
- gobby/cli/install.py +4 -4
- gobby/cli/installers/antigravity.py +3 -9
- gobby/cli/installers/claude.py +15 -9
- gobby/cli/installers/codex.py +2 -8
- gobby/cli/installers/gemini.py +8 -8
- gobby/cli/installers/shared.py +175 -13
- gobby/cli/sessions.py +1 -1
- gobby/cli/skills.py +858 -0
- gobby/cli/tasks/ai.py +0 -440
- gobby/cli/tasks/crud.py +44 -6
- gobby/cli/tasks/main.py +0 -4
- gobby/cli/tui.py +2 -2
- gobby/cli/utils.py +12 -5
- gobby/clones/__init__.py +13 -0
- gobby/clones/git.py +547 -0
- gobby/conductor/__init__.py +16 -0
- gobby/conductor/alerts.py +135 -0
- gobby/conductor/loop.py +164 -0
- gobby/conductor/monitors/__init__.py +11 -0
- gobby/conductor/monitors/agents.py +116 -0
- gobby/conductor/monitors/tasks.py +155 -0
- gobby/conductor/pricing.py +234 -0
- gobby/conductor/token_tracker.py +160 -0
- gobby/config/__init__.py +12 -97
- gobby/config/app.py +69 -91
- gobby/config/extensions.py +2 -2
- gobby/config/features.py +7 -130
- gobby/config/search.py +110 -0
- gobby/config/servers.py +1 -1
- gobby/config/skills.py +43 -0
- gobby/config/tasks.py +9 -41
- gobby/hooks/__init__.py +0 -13
- gobby/hooks/event_handlers.py +188 -2
- gobby/hooks/hook_manager.py +50 -4
- gobby/hooks/plugins.py +1 -1
- gobby/hooks/skill_manager.py +130 -0
- gobby/hooks/webhooks.py +1 -1
- gobby/install/claude/hooks/hook_dispatcher.py +4 -4
- gobby/install/codex/hooks/hook_dispatcher.py +1 -1
- gobby/install/gemini/hooks/hook_dispatcher.py +87 -12
- gobby/llm/claude.py +22 -34
- gobby/llm/claude_executor.py +46 -256
- gobby/llm/codex_executor.py +59 -291
- gobby/llm/executor.py +21 -0
- gobby/llm/gemini.py +134 -110
- gobby/llm/litellm_executor.py +143 -6
- gobby/llm/resolver.py +98 -35
- gobby/mcp_proxy/importer.py +62 -4
- gobby/mcp_proxy/instructions.py +56 -0
- gobby/mcp_proxy/models.py +15 -0
- gobby/mcp_proxy/registries.py +68 -8
- gobby/mcp_proxy/server.py +33 -3
- gobby/mcp_proxy/services/recommendation.py +43 -11
- gobby/mcp_proxy/services/tool_proxy.py +81 -1
- gobby/mcp_proxy/stdio.py +2 -1
- gobby/mcp_proxy/tools/__init__.py +0 -2
- gobby/mcp_proxy/tools/agent_messaging.py +317 -0
- gobby/mcp_proxy/tools/agents.py +31 -731
- gobby/mcp_proxy/tools/clones.py +518 -0
- gobby/mcp_proxy/tools/memory.py +3 -26
- gobby/mcp_proxy/tools/metrics.py +65 -1
- gobby/mcp_proxy/tools/orchestration/__init__.py +3 -0
- gobby/mcp_proxy/tools/orchestration/cleanup.py +151 -0
- gobby/mcp_proxy/tools/orchestration/wait.py +467 -0
- gobby/mcp_proxy/tools/sessions/__init__.py +14 -0
- gobby/mcp_proxy/tools/sessions/_commits.py +232 -0
- gobby/mcp_proxy/tools/sessions/_crud.py +253 -0
- gobby/mcp_proxy/tools/sessions/_factory.py +63 -0
- gobby/mcp_proxy/tools/sessions/_handoff.py +499 -0
- gobby/mcp_proxy/tools/sessions/_messages.py +138 -0
- gobby/mcp_proxy/tools/skills/__init__.py +616 -0
- gobby/mcp_proxy/tools/spawn_agent.py +417 -0
- gobby/mcp_proxy/tools/task_orchestration.py +7 -0
- gobby/mcp_proxy/tools/task_readiness.py +14 -0
- gobby/mcp_proxy/tools/task_sync.py +1 -1
- gobby/mcp_proxy/tools/tasks/_context.py +0 -20
- gobby/mcp_proxy/tools/tasks/_crud.py +91 -4
- gobby/mcp_proxy/tools/tasks/_expansion.py +348 -0
- gobby/mcp_proxy/tools/tasks/_factory.py +6 -16
- gobby/mcp_proxy/tools/tasks/_lifecycle.py +110 -45
- gobby/mcp_proxy/tools/tasks/_lifecycle_validation.py +18 -29
- gobby/mcp_proxy/tools/workflows.py +1 -1
- gobby/mcp_proxy/tools/worktrees.py +0 -338
- gobby/memory/backends/__init__.py +6 -1
- gobby/memory/backends/mem0.py +6 -1
- gobby/memory/extractor.py +477 -0
- gobby/memory/ingestion/__init__.py +5 -0
- gobby/memory/ingestion/multimodal.py +221 -0
- gobby/memory/manager.py +73 -285
- gobby/memory/search/__init__.py +10 -0
- gobby/memory/search/coordinator.py +248 -0
- gobby/memory/services/__init__.py +5 -0
- gobby/memory/services/crossref.py +142 -0
- gobby/prompts/loader.py +5 -2
- gobby/runner.py +37 -16
- gobby/search/__init__.py +48 -6
- gobby/search/backends/__init__.py +159 -0
- gobby/search/backends/embedding.py +225 -0
- gobby/search/embeddings.py +238 -0
- gobby/search/models.py +148 -0
- gobby/search/unified.py +496 -0
- gobby/servers/http.py +24 -12
- gobby/servers/routes/admin.py +294 -0
- gobby/servers/routes/mcp/endpoints/__init__.py +61 -0
- gobby/servers/routes/mcp/endpoints/discovery.py +405 -0
- gobby/servers/routes/mcp/endpoints/execution.py +568 -0
- gobby/servers/routes/mcp/endpoints/registry.py +378 -0
- gobby/servers/routes/mcp/endpoints/server.py +304 -0
- gobby/servers/routes/mcp/hooks.py +1 -1
- gobby/servers/routes/mcp/tools.py +48 -1317
- gobby/servers/websocket.py +2 -2
- gobby/sessions/analyzer.py +2 -0
- gobby/sessions/lifecycle.py +1 -1
- gobby/sessions/processor.py +10 -0
- gobby/sessions/transcripts/base.py +2 -0
- gobby/sessions/transcripts/claude.py +79 -10
- gobby/skills/__init__.py +91 -0
- gobby/skills/loader.py +685 -0
- gobby/skills/manager.py +384 -0
- gobby/skills/parser.py +286 -0
- gobby/skills/search.py +463 -0
- gobby/skills/sync.py +119 -0
- gobby/skills/updater.py +385 -0
- gobby/skills/validator.py +368 -0
- gobby/storage/clones.py +378 -0
- gobby/storage/database.py +1 -1
- gobby/storage/memories.py +43 -13
- gobby/storage/migrations.py +162 -201
- gobby/storage/sessions.py +116 -7
- gobby/storage/skills.py +782 -0
- gobby/storage/tasks/_crud.py +4 -4
- gobby/storage/tasks/_lifecycle.py +57 -7
- gobby/storage/tasks/_manager.py +14 -5
- gobby/storage/tasks/_models.py +8 -3
- gobby/sync/memories.py +40 -5
- gobby/sync/tasks.py +83 -6
- gobby/tasks/__init__.py +1 -2
- gobby/tasks/external_validator.py +1 -1
- gobby/tasks/validation.py +46 -35
- gobby/tools/summarizer.py +91 -10
- gobby/tui/api_client.py +4 -7
- gobby/tui/app.py +5 -3
- gobby/tui/screens/orchestrator.py +1 -2
- gobby/tui/screens/tasks.py +2 -4
- gobby/tui/ws_client.py +1 -1
- gobby/utils/daemon_client.py +2 -2
- gobby/utils/project_context.py +2 -3
- gobby/utils/status.py +13 -0
- gobby/workflows/actions.py +221 -1135
- gobby/workflows/artifact_actions.py +31 -0
- gobby/workflows/autonomous_actions.py +11 -0
- gobby/workflows/context_actions.py +93 -1
- gobby/workflows/detection_helpers.py +115 -31
- gobby/workflows/enforcement/__init__.py +47 -0
- gobby/workflows/enforcement/blocking.py +269 -0
- gobby/workflows/enforcement/commit_policy.py +283 -0
- gobby/workflows/enforcement/handlers.py +269 -0
- gobby/workflows/{task_enforcement_actions.py → enforcement/task_policy.py} +29 -388
- gobby/workflows/engine.py +13 -2
- gobby/workflows/git_utils.py +106 -0
- gobby/workflows/lifecycle_evaluator.py +29 -1
- gobby/workflows/llm_actions.py +30 -0
- gobby/workflows/loader.py +19 -6
- gobby/workflows/mcp_actions.py +20 -1
- gobby/workflows/memory_actions.py +154 -0
- gobby/workflows/safe_evaluator.py +183 -0
- gobby/workflows/session_actions.py +44 -0
- gobby/workflows/state_actions.py +60 -1
- gobby/workflows/stop_signal_actions.py +55 -0
- gobby/workflows/summary_actions.py +111 -1
- gobby/workflows/task_sync_actions.py +347 -0
- gobby/workflows/todo_actions.py +34 -1
- gobby/workflows/webhook_actions.py +185 -0
- {gobby-0.2.5.dist-info → gobby-0.2.7.dist-info}/METADATA +87 -21
- {gobby-0.2.5.dist-info → gobby-0.2.7.dist-info}/RECORD +201 -172
- {gobby-0.2.5.dist-info → gobby-0.2.7.dist-info}/WHEEL +1 -1
- gobby/adapters/codex.py +0 -1292
- gobby/install/claude/commands/gobby/bug.md +0 -51
- gobby/install/claude/commands/gobby/chore.md +0 -51
- gobby/install/claude/commands/gobby/epic.md +0 -52
- gobby/install/claude/commands/gobby/eval.md +0 -235
- gobby/install/claude/commands/gobby/feat.md +0 -49
- gobby/install/claude/commands/gobby/nit.md +0 -52
- gobby/install/claude/commands/gobby/ref.md +0 -52
- gobby/install/codex/prompts/forget.md +0 -7
- gobby/install/codex/prompts/memories.md +0 -7
- gobby/install/codex/prompts/recall.md +0 -7
- gobby/install/codex/prompts/remember.md +0 -13
- gobby/llm/gemini_executor.py +0 -339
- gobby/mcp_proxy/tools/session_messages.py +0 -1056
- gobby/mcp_proxy/tools/task_expansion.py +0 -591
- gobby/prompts/defaults/expansion/system.md +0 -119
- gobby/prompts/defaults/expansion/user.md +0 -48
- gobby/prompts/defaults/external_validation/agent.md +0 -72
- gobby/prompts/defaults/external_validation/external.md +0 -63
- gobby/prompts/defaults/external_validation/spawn.md +0 -83
- gobby/prompts/defaults/external_validation/system.md +0 -6
- gobby/prompts/defaults/features/import_mcp.md +0 -22
- gobby/prompts/defaults/features/import_mcp_github.md +0 -17
- gobby/prompts/defaults/features/import_mcp_search.md +0 -16
- gobby/prompts/defaults/features/recommend_tools.md +0 -32
- gobby/prompts/defaults/features/recommend_tools_hybrid.md +0 -35
- gobby/prompts/defaults/features/recommend_tools_llm.md +0 -30
- gobby/prompts/defaults/features/server_description.md +0 -20
- gobby/prompts/defaults/features/server_description_system.md +0 -6
- gobby/prompts/defaults/features/task_description.md +0 -31
- gobby/prompts/defaults/features/task_description_system.md +0 -6
- gobby/prompts/defaults/features/tool_summary.md +0 -17
- gobby/prompts/defaults/features/tool_summary_system.md +0 -6
- gobby/prompts/defaults/research/step.md +0 -58
- gobby/prompts/defaults/validation/criteria.md +0 -47
- gobby/prompts/defaults/validation/validate.md +0 -38
- gobby/storage/migrations_legacy.py +0 -1359
- gobby/tasks/context.py +0 -747
- gobby/tasks/criteria.py +0 -342
- gobby/tasks/expansion.py +0 -626
- gobby/tasks/prompts/expand.py +0 -327
- gobby/tasks/research.py +0 -421
- gobby/tasks/tdd.py +0 -352
- {gobby-0.2.5.dist-info → gobby-0.2.7.dist-info}/entry_points.txt +0 -0
- {gobby-0.2.5.dist-info → gobby-0.2.7.dist-info}/licenses/LICENSE.md +0 -0
- {gobby-0.2.5.dist-info → gobby-0.2.7.dist-info}/top_level.txt +0 -0
gobby/tasks/context.py
DELETED
|
@@ -1,747 +0,0 @@
|
|
|
1
|
-
"""
|
|
2
|
-
Context gathering for task expansion.
|
|
3
|
-
|
|
4
|
-
This module provides tools to gather relevant context from the codebase and
|
|
5
|
-
project state to inform the task expansion process.
|
|
6
|
-
"""
|
|
7
|
-
|
|
8
|
-
from __future__ import annotations
|
|
9
|
-
|
|
10
|
-
import ast
|
|
11
|
-
import asyncio
|
|
12
|
-
import itertools
|
|
13
|
-
import logging
|
|
14
|
-
from dataclasses import dataclass
|
|
15
|
-
from pathlib import Path
|
|
16
|
-
from typing import Any
|
|
17
|
-
|
|
18
|
-
from gobby.storage.tasks import Task
|
|
19
|
-
from gobby.utils.project_context import find_project_root
|
|
20
|
-
|
|
21
|
-
logger = logging.getLogger(__name__)
|
|
22
|
-
|
|
23
|
-
|
|
24
|
-
@dataclass
|
|
25
|
-
class ExpansionContext:
|
|
26
|
-
"""Context gathered for task expansion."""
|
|
27
|
-
|
|
28
|
-
task: Task
|
|
29
|
-
related_tasks: list[Task]
|
|
30
|
-
relevant_files: list[str]
|
|
31
|
-
file_snippets: dict[str, str]
|
|
32
|
-
project_patterns: dict[str, str]
|
|
33
|
-
agent_findings: str = ""
|
|
34
|
-
web_research: list[dict[str, Any]] | None = None
|
|
35
|
-
existing_tests: dict[str, list[str]] | None = None # module -> [test files]
|
|
36
|
-
function_signatures: dict[str, list[str]] | None = None # file -> [signatures]
|
|
37
|
-
verification_commands: dict[str, str] | None = None # name -> command
|
|
38
|
-
project_structure: str | None = None # tree view of project directories
|
|
39
|
-
|
|
40
|
-
def to_dict(self) -> dict[str, Any]:
|
|
41
|
-
"""Convert to dictionary."""
|
|
42
|
-
return {
|
|
43
|
-
"task": self.task.to_dict(),
|
|
44
|
-
"related_tasks": [t.to_dict() for t in self.related_tasks],
|
|
45
|
-
"relevant_files": self.relevant_files,
|
|
46
|
-
"project_patterns": self.project_patterns,
|
|
47
|
-
"agent_findings": self.agent_findings,
|
|
48
|
-
"web_research": self.web_research,
|
|
49
|
-
"existing_tests": self.existing_tests,
|
|
50
|
-
"function_signatures": self.function_signatures,
|
|
51
|
-
"verification_commands": self.verification_commands,
|
|
52
|
-
"project_structure": self.project_structure,
|
|
53
|
-
# We don't include full snippets in dict summary often, but useful for debug
|
|
54
|
-
"snippet_count": len(self.file_snippets),
|
|
55
|
-
}
|
|
56
|
-
|
|
57
|
-
|
|
58
|
-
class ExpansionContextGatherer:
|
|
59
|
-
"""Gathers context for task expansion."""
|
|
60
|
-
|
|
61
|
-
def __init__(
|
|
62
|
-
self,
|
|
63
|
-
task_manager: Any,
|
|
64
|
-
llm_service: Any = None,
|
|
65
|
-
config: Any = None,
|
|
66
|
-
mcp_manager: Any = None,
|
|
67
|
-
): # Type Any to avoid circular import
|
|
68
|
-
self.task_manager = task_manager
|
|
69
|
-
self.llm_service = llm_service
|
|
70
|
-
self.config = config
|
|
71
|
-
self.mcp_manager = mcp_manager
|
|
72
|
-
|
|
73
|
-
async def gather_context(
|
|
74
|
-
self,
|
|
75
|
-
task: Task,
|
|
76
|
-
enable_web_research: bool = False,
|
|
77
|
-
enable_code_context: bool = True,
|
|
78
|
-
) -> ExpansionContext:
|
|
79
|
-
"""
|
|
80
|
-
Gather all relevant context for a task.
|
|
81
|
-
|
|
82
|
-
Args:
|
|
83
|
-
task: The task to gather context for.
|
|
84
|
-
enable_web_research: Whether to enable web research.
|
|
85
|
-
enable_code_context: Whether to enable code context gathering.
|
|
86
|
-
Returns:
|
|
87
|
-
Populated ExpansionContext object.
|
|
88
|
-
"""
|
|
89
|
-
logger.info(f"Gathering expansion context for task {task.id}")
|
|
90
|
-
|
|
91
|
-
related_tasks = await self._find_related_tasks(task)
|
|
92
|
-
|
|
93
|
-
# 1. Regex/Heuristic based file finding
|
|
94
|
-
relevant_files = []
|
|
95
|
-
if enable_code_context:
|
|
96
|
-
relevant_files = await self._find_relevant_files(task)
|
|
97
|
-
|
|
98
|
-
# 2. Agentic research (if enabled)
|
|
99
|
-
agent_findings = ""
|
|
100
|
-
web_research: list[dict[str, Any]] | None = None
|
|
101
|
-
research_globally_enabled = getattr(self.config, "codebase_research_enabled", False)
|
|
102
|
-
should_run_research = enable_code_context and research_globally_enabled
|
|
103
|
-
|
|
104
|
-
if should_run_research and self.llm_service:
|
|
105
|
-
# Apply research timeout if configured
|
|
106
|
-
research_timeout = getattr(self.config, "research_timeout", 60.0)
|
|
107
|
-
try:
|
|
108
|
-
async with asyncio.timeout(research_timeout):
|
|
109
|
-
from gobby.tasks.research import TaskResearchAgent
|
|
110
|
-
|
|
111
|
-
agent = TaskResearchAgent(self.config, self.llm_service, self.mcp_manager)
|
|
112
|
-
research_result = await agent.run(task, enable_web_search=enable_web_research)
|
|
113
|
-
|
|
114
|
-
# Merge found files
|
|
115
|
-
for f in research_result.get("relevant_files", []):
|
|
116
|
-
if f not in relevant_files:
|
|
117
|
-
relevant_files.append(f)
|
|
118
|
-
|
|
119
|
-
agent_findings = research_result.get("findings", "")
|
|
120
|
-
|
|
121
|
-
# Capture web research results if any
|
|
122
|
-
web_research_data = research_result.get("web_research", [])
|
|
123
|
-
if web_research_data:
|
|
124
|
-
web_research = web_research_data
|
|
125
|
-
logger.info(f"Captured {len(web_research_data)} web search results")
|
|
126
|
-
|
|
127
|
-
logger.info(
|
|
128
|
-
f"Agentic research added {len(research_result.get('relevant_files', []))} files"
|
|
129
|
-
)
|
|
130
|
-
except TimeoutError:
|
|
131
|
-
logger.warning(
|
|
132
|
-
f"Research phase timed out after {research_timeout}s. "
|
|
133
|
-
f"Continuing with partial context. Consider increasing task_expansion.research_timeout."
|
|
134
|
-
)
|
|
135
|
-
except Exception as e:
|
|
136
|
-
logger.error(f"Agentic research failed: {e}")
|
|
137
|
-
|
|
138
|
-
file_snippets = self._read_file_snippets(relevant_files)
|
|
139
|
-
project_patterns = self._detect_project_patterns()
|
|
140
|
-
|
|
141
|
-
# Discover existing tests for relevant Python files
|
|
142
|
-
python_files = [f for f in relevant_files if f.endswith(".py")]
|
|
143
|
-
existing_tests = self.discover_existing_tests(python_files) if python_files else {}
|
|
144
|
-
|
|
145
|
-
# Extract function signatures from Python files
|
|
146
|
-
function_signatures = self.extract_signatures(python_files) if python_files else {}
|
|
147
|
-
|
|
148
|
-
# Get verification commands from project config
|
|
149
|
-
verification_commands = self._get_verification_commands()
|
|
150
|
-
|
|
151
|
-
# Generate project structure tree
|
|
152
|
-
project_structure = await self._generate_project_structure()
|
|
153
|
-
|
|
154
|
-
return ExpansionContext(
|
|
155
|
-
task=task,
|
|
156
|
-
related_tasks=related_tasks,
|
|
157
|
-
relevant_files=relevant_files,
|
|
158
|
-
file_snippets=file_snippets,
|
|
159
|
-
project_patterns=project_patterns,
|
|
160
|
-
agent_findings=agent_findings,
|
|
161
|
-
web_research=web_research,
|
|
162
|
-
existing_tests=existing_tests if existing_tests else None,
|
|
163
|
-
function_signatures=function_signatures if function_signatures else None,
|
|
164
|
-
verification_commands=verification_commands if verification_commands else None,
|
|
165
|
-
project_structure=project_structure,
|
|
166
|
-
)
|
|
167
|
-
|
|
168
|
-
async def _find_related_tasks(self, task: Task) -> list[Task]:
|
|
169
|
-
"""Find tasks related to the current task using fuzzy match or project."""
|
|
170
|
-
# Simple implementation for now: latest tasks in same project
|
|
171
|
-
# In the future, this could use vector search or title fuzzy matching
|
|
172
|
-
cols = self.task_manager.list_tasks(
|
|
173
|
-
project_id=task.project_id,
|
|
174
|
-
limit=5,
|
|
175
|
-
status="open",
|
|
176
|
-
)
|
|
177
|
-
return [t for t in cols if t.id != task.id]
|
|
178
|
-
|
|
179
|
-
async def _find_relevant_files(self, task: Task) -> list[str]:
|
|
180
|
-
"""Find files relevant to the task description."""
|
|
181
|
-
# Placeholder for actual relevance logic (e.g. grep or filenames in description)
|
|
182
|
-
# For now, return empty list or naive scan?
|
|
183
|
-
# Let's do a simple check: if description mentions a file existing in src, include it.
|
|
184
|
-
root = find_project_root()
|
|
185
|
-
if not root:
|
|
186
|
-
return []
|
|
187
|
-
|
|
188
|
-
relevant = []
|
|
189
|
-
# Naive: splits description and checks if tokens match filenames
|
|
190
|
-
# This is very basic but serves as a starting point.
|
|
191
|
-
if task.description:
|
|
192
|
-
# Regex to find potential file paths:
|
|
193
|
-
# - alphanumeric, dots, slashes, dashes, underscores
|
|
194
|
-
# - must end with a common extension
|
|
195
|
-
# - length constraint to avoid noise
|
|
196
|
-
import re
|
|
197
|
-
|
|
198
|
-
# Common extensions to look for
|
|
199
|
-
extensions = "py|js|ts|tsx|jsx|md|json|html|css|yaml|toml|sh"
|
|
200
|
-
pattern = re.compile(rf"(?:\.?/)?[\w\-/_]+\.(?:{extensions})\b", re.IGNORECASE)
|
|
201
|
-
|
|
202
|
-
matches = pattern.findall(task.description)
|
|
203
|
-
for match in matches:
|
|
204
|
-
# Clean up match
|
|
205
|
-
fpath = match.strip()
|
|
206
|
-
# Resolve path
|
|
207
|
-
try:
|
|
208
|
-
path = (root / fpath).resolve()
|
|
209
|
-
# Security check: must be within root
|
|
210
|
-
if root in path.parents or path == root:
|
|
211
|
-
if path.exists() and path.is_file():
|
|
212
|
-
rel_path = str(path.relative_to(root))
|
|
213
|
-
if rel_path not in relevant:
|
|
214
|
-
relevant.append(rel_path)
|
|
215
|
-
except Exception:
|
|
216
|
-
continue # nosec B112 - skip files we can't process
|
|
217
|
-
|
|
218
|
-
return relevant
|
|
219
|
-
|
|
220
|
-
def _read_file_snippets(self, files: list[str]) -> dict[str, str]:
|
|
221
|
-
"""Read content of relevant files."""
|
|
222
|
-
snippets: dict[str, str] = {}
|
|
223
|
-
root = find_project_root()
|
|
224
|
-
if not root:
|
|
225
|
-
return snippets
|
|
226
|
-
|
|
227
|
-
for fname in files:
|
|
228
|
-
path = root / fname
|
|
229
|
-
if path.exists() and path.is_file():
|
|
230
|
-
try:
|
|
231
|
-
# Read first 50 lines as context
|
|
232
|
-
with open(path, encoding="utf-8") as f:
|
|
233
|
-
lines = list(itertools.islice(f, 50))
|
|
234
|
-
snippets[fname] = "".join(lines)
|
|
235
|
-
except Exception as e:
|
|
236
|
-
logger.warning(f"Failed to read context file {fname}: {e}")
|
|
237
|
-
return snippets
|
|
238
|
-
|
|
239
|
-
def _detect_project_patterns(self) -> dict[str, str]:
|
|
240
|
-
"""Detect project patterns (e.g. test framework, language)."""
|
|
241
|
-
patterns: dict[str, str] = {}
|
|
242
|
-
root = find_project_root()
|
|
243
|
-
if not root:
|
|
244
|
-
return patterns
|
|
245
|
-
|
|
246
|
-
# Check for common config files
|
|
247
|
-
if (root / "pyproject.toml").exists():
|
|
248
|
-
patterns["build_system"] = "pyproject.toml"
|
|
249
|
-
if (root / "package.json").exists():
|
|
250
|
-
patterns["frontend"] = "npm/node"
|
|
251
|
-
|
|
252
|
-
# Check for test directories
|
|
253
|
-
if (root / "tests").exists():
|
|
254
|
-
patterns["tests"] = "tests/"
|
|
255
|
-
|
|
256
|
-
return patterns
|
|
257
|
-
|
|
258
|
-
def _get_verification_commands(self) -> dict[str, str]:
|
|
259
|
-
"""Get verification commands from project config.
|
|
260
|
-
|
|
261
|
-
Returns:
|
|
262
|
-
Dict mapping command names to their values, e.g.:
|
|
263
|
-
{
|
|
264
|
-
"unit_tests": "npm test",
|
|
265
|
-
"lint": "npm run lint",
|
|
266
|
-
"type_check": "npm run typecheck"
|
|
267
|
-
}
|
|
268
|
-
"""
|
|
269
|
-
from gobby.utils.project_context import get_verification_config
|
|
270
|
-
|
|
271
|
-
commands: dict[str, str] = {}
|
|
272
|
-
config = get_verification_config()
|
|
273
|
-
|
|
274
|
-
if not config:
|
|
275
|
-
return commands
|
|
276
|
-
|
|
277
|
-
if config.unit_tests:
|
|
278
|
-
commands["unit_tests"] = config.unit_tests
|
|
279
|
-
if config.type_check:
|
|
280
|
-
commands["type_check"] = config.type_check
|
|
281
|
-
if config.lint:
|
|
282
|
-
commands["lint"] = config.lint
|
|
283
|
-
if config.integration:
|
|
284
|
-
commands["integration"] = config.integration
|
|
285
|
-
|
|
286
|
-
# Include any custom commands
|
|
287
|
-
if config.custom:
|
|
288
|
-
commands.update(config.custom)
|
|
289
|
-
|
|
290
|
-
return commands
|
|
291
|
-
|
|
292
|
-
def discover_existing_tests(self, module_paths: list[str]) -> dict[str, list[str]]:
|
|
293
|
-
"""
|
|
294
|
-
Find test files that cover the given modules.
|
|
295
|
-
|
|
296
|
-
For each module path, searches the tests/ directory for files that
|
|
297
|
-
import from that module.
|
|
298
|
-
|
|
299
|
-
Args:
|
|
300
|
-
module_paths: List of file paths (e.g., ['src/gobby/tasks/expansion.py'])
|
|
301
|
-
|
|
302
|
-
Returns:
|
|
303
|
-
Dict mapping module path to list of test files that import it.
|
|
304
|
-
"""
|
|
305
|
-
import re
|
|
306
|
-
import subprocess # nosec B404 - subprocess needed for grep command
|
|
307
|
-
|
|
308
|
-
result: dict[str, list[str]] = {}
|
|
309
|
-
root = find_project_root()
|
|
310
|
-
if not root:
|
|
311
|
-
return result
|
|
312
|
-
|
|
313
|
-
tests_dir = root / "tests"
|
|
314
|
-
if not tests_dir.exists():
|
|
315
|
-
return result
|
|
316
|
-
|
|
317
|
-
for module_path in module_paths:
|
|
318
|
-
# Convert file path to import path
|
|
319
|
-
# e.g., src/gobby/tasks/expansion.py -> gobby.tasks.expansion
|
|
320
|
-
import_path = self._path_to_import(module_path)
|
|
321
|
-
if not import_path:
|
|
322
|
-
continue
|
|
323
|
-
|
|
324
|
-
# Search for imports of this module in tests/
|
|
325
|
-
try:
|
|
326
|
-
# Use grep to find test files that import this module
|
|
327
|
-
# Pattern matches: from {module} import, import {module}
|
|
328
|
-
pattern = rf"(from\s+{re.escape(import_path)}(\.\w+)*\s+import|import\s+{re.escape(import_path)})"
|
|
329
|
-
grep_result = subprocess.run( # nosec B603 B607 - grep with constructed pattern on tests directory
|
|
330
|
-
["grep", "-r", "-l", "-E", pattern, str(tests_dir)],
|
|
331
|
-
capture_output=True,
|
|
332
|
-
text=True,
|
|
333
|
-
timeout=10,
|
|
334
|
-
)
|
|
335
|
-
|
|
336
|
-
if grep_result.returncode == 0 and grep_result.stdout.strip():
|
|
337
|
-
test_files = []
|
|
338
|
-
for line in grep_result.stdout.strip().split("\n"):
|
|
339
|
-
if line:
|
|
340
|
-
# Convert to relative path from project root
|
|
341
|
-
rel_path = line.replace(str(root) + "/", "")
|
|
342
|
-
test_files.append(rel_path)
|
|
343
|
-
|
|
344
|
-
if test_files:
|
|
345
|
-
result[module_path] = test_files
|
|
346
|
-
logger.debug(
|
|
347
|
-
f"Found {len(test_files)} test files for {module_path}: {test_files}"
|
|
348
|
-
)
|
|
349
|
-
|
|
350
|
-
except subprocess.TimeoutExpired:
|
|
351
|
-
logger.warning(f"Timeout searching for tests of {module_path}")
|
|
352
|
-
except Exception as e:
|
|
353
|
-
logger.warning(f"Error searching for tests of {module_path}: {e}")
|
|
354
|
-
|
|
355
|
-
return result
|
|
356
|
-
|
|
357
|
-
def _path_to_import(self, file_path: str) -> str | None:
|
|
358
|
-
"""
|
|
359
|
-
Convert a file path to a Python import path.
|
|
360
|
-
|
|
361
|
-
Args:
|
|
362
|
-
file_path: File path like 'src/gobby/tasks/expansion.py'
|
|
363
|
-
|
|
364
|
-
Returns:
|
|
365
|
-
Import path like 'gobby.tasks.expansion', or None if not convertible.
|
|
366
|
-
"""
|
|
367
|
-
# Remove .py extension
|
|
368
|
-
if not file_path.endswith(".py"):
|
|
369
|
-
return None
|
|
370
|
-
|
|
371
|
-
path = file_path[:-3] # Remove .py
|
|
372
|
-
|
|
373
|
-
# Remove common prefixes
|
|
374
|
-
for prefix in ["src/", "lib/"]:
|
|
375
|
-
if path.startswith(prefix):
|
|
376
|
-
path = path[len(prefix) :]
|
|
377
|
-
break
|
|
378
|
-
|
|
379
|
-
# Convert slashes to dots
|
|
380
|
-
import_path = path.replace("/", ".")
|
|
381
|
-
|
|
382
|
-
# Remove __init__ suffix if present
|
|
383
|
-
if import_path.endswith(".__init__"):
|
|
384
|
-
import_path = import_path[:-9]
|
|
385
|
-
|
|
386
|
-
return import_path if import_path else None
|
|
387
|
-
|
|
388
|
-
def extract_signatures(self, file_paths: list[str]) -> dict[str, list[str]]:
|
|
389
|
-
"""
|
|
390
|
-
Extract function and class signatures from Python files using AST.
|
|
391
|
-
|
|
392
|
-
Args:
|
|
393
|
-
file_paths: List of file paths (e.g., ['src/gobby/tasks/expansion.py'])
|
|
394
|
-
|
|
395
|
-
Returns:
|
|
396
|
-
Dict mapping file path to list of signatures:
|
|
397
|
-
{
|
|
398
|
-
'src/gobby/tasks/expansion.py': [
|
|
399
|
-
'class TaskExpander',
|
|
400
|
-
'def expand_task(self, task_id: str, ...) -> dict[str, Any]',
|
|
401
|
-
'def _parse_subtasks(self, response: str) -> list[SubtaskSpec]',
|
|
402
|
-
]
|
|
403
|
-
}
|
|
404
|
-
"""
|
|
405
|
-
result: dict[str, list[str]] = {}
|
|
406
|
-
root = find_project_root()
|
|
407
|
-
if not root:
|
|
408
|
-
return result
|
|
409
|
-
|
|
410
|
-
for file_path in file_paths:
|
|
411
|
-
# Only process Python files
|
|
412
|
-
if not file_path.endswith(".py"):
|
|
413
|
-
continue
|
|
414
|
-
|
|
415
|
-
full_path = root / file_path
|
|
416
|
-
if not full_path.exists() or not full_path.is_file():
|
|
417
|
-
continue
|
|
418
|
-
|
|
419
|
-
try:
|
|
420
|
-
with open(full_path, encoding="utf-8") as f:
|
|
421
|
-
source = f.read()
|
|
422
|
-
|
|
423
|
-
tree = ast.parse(source)
|
|
424
|
-
signatures = self._extract_signatures_from_ast(tree)
|
|
425
|
-
|
|
426
|
-
if signatures:
|
|
427
|
-
result[file_path] = signatures
|
|
428
|
-
logger.debug(f"Extracted {len(signatures)} signatures from {file_path}")
|
|
429
|
-
|
|
430
|
-
except SyntaxError as e:
|
|
431
|
-
logger.warning(f"Syntax error parsing {file_path}: {e}")
|
|
432
|
-
except Exception as e:
|
|
433
|
-
logger.warning(f"Error extracting signatures from {file_path}: {e}")
|
|
434
|
-
|
|
435
|
-
return result
|
|
436
|
-
|
|
437
|
-
def _extract_signatures_from_ast(self, tree: ast.AST) -> list[str]:
|
|
438
|
-
"""
|
|
439
|
-
Extract signatures from an AST tree.
|
|
440
|
-
|
|
441
|
-
Args:
|
|
442
|
-
tree: Parsed AST tree
|
|
443
|
-
|
|
444
|
-
Returns:
|
|
445
|
-
List of signature strings
|
|
446
|
-
"""
|
|
447
|
-
signatures: list[str] = []
|
|
448
|
-
|
|
449
|
-
for node in ast.walk(tree):
|
|
450
|
-
if isinstance(node, ast.ClassDef):
|
|
451
|
-
# Class signature
|
|
452
|
-
bases = ", ".join(self._get_base_names(node))
|
|
453
|
-
if bases:
|
|
454
|
-
signatures.append(f"class {node.name}({bases})")
|
|
455
|
-
else:
|
|
456
|
-
signatures.append(f"class {node.name}")
|
|
457
|
-
|
|
458
|
-
elif isinstance(node, ast.FunctionDef | ast.AsyncFunctionDef):
|
|
459
|
-
# Function signature with type hints
|
|
460
|
-
sig = self._format_function_signature(node)
|
|
461
|
-
signatures.append(sig)
|
|
462
|
-
|
|
463
|
-
return signatures
|
|
464
|
-
|
|
465
|
-
def _get_base_names(self, class_node: ast.ClassDef) -> list[str]:
|
|
466
|
-
"""Get base class names from a ClassDef node."""
|
|
467
|
-
names: list[str] = []
|
|
468
|
-
for base in class_node.bases:
|
|
469
|
-
if isinstance(base, ast.Name):
|
|
470
|
-
names.append(base.id)
|
|
471
|
-
elif isinstance(base, ast.Attribute):
|
|
472
|
-
# Handle cases like module.Class
|
|
473
|
-
names.append(ast.unparse(base))
|
|
474
|
-
elif isinstance(base, ast.Subscript):
|
|
475
|
-
# Handle generics like Generic[T]
|
|
476
|
-
names.append(ast.unparse(base))
|
|
477
|
-
return names
|
|
478
|
-
|
|
479
|
-
def _format_function_signature(self, func_node: ast.FunctionDef | ast.AsyncFunctionDef) -> str:
|
|
480
|
-
"""
|
|
481
|
-
Format a function signature with type hints.
|
|
482
|
-
|
|
483
|
-
Args:
|
|
484
|
-
func_node: Function AST node
|
|
485
|
-
|
|
486
|
-
Returns:
|
|
487
|
-
Formatted signature string like 'def foo(x: int, y: str) -> bool'
|
|
488
|
-
"""
|
|
489
|
-
prefix = "async def" if isinstance(func_node, ast.AsyncFunctionDef) else "def"
|
|
490
|
-
name = func_node.name
|
|
491
|
-
|
|
492
|
-
# Format arguments
|
|
493
|
-
args_parts: list[str] = []
|
|
494
|
-
|
|
495
|
-
# Handle positional-only args (Python 3.8+)
|
|
496
|
-
for arg in func_node.args.posonlyargs:
|
|
497
|
-
args_parts.append(self._format_arg(arg))
|
|
498
|
-
if func_node.args.posonlyargs:
|
|
499
|
-
args_parts.append("/")
|
|
500
|
-
|
|
501
|
-
# Regular args
|
|
502
|
-
num_defaults = len(func_node.args.defaults)
|
|
503
|
-
num_args = len(func_node.args.args)
|
|
504
|
-
for i, arg in enumerate(func_node.args.args):
|
|
505
|
-
default_idx = i - (num_args - num_defaults)
|
|
506
|
-
if default_idx >= 0:
|
|
507
|
-
args_parts.append(f"{self._format_arg(arg)}=...")
|
|
508
|
-
else:
|
|
509
|
-
args_parts.append(self._format_arg(arg))
|
|
510
|
-
|
|
511
|
-
# *args
|
|
512
|
-
if func_node.args.vararg:
|
|
513
|
-
args_parts.append(f"*{self._format_arg(func_node.args.vararg)}")
|
|
514
|
-
elif func_node.args.kwonlyargs:
|
|
515
|
-
args_parts.append("*")
|
|
516
|
-
|
|
517
|
-
# Keyword-only args
|
|
518
|
-
for i, arg in enumerate(func_node.args.kwonlyargs):
|
|
519
|
-
if func_node.args.kw_defaults[i] is not None:
|
|
520
|
-
args_parts.append(f"{self._format_arg(arg)}=...")
|
|
521
|
-
else:
|
|
522
|
-
args_parts.append(self._format_arg(arg))
|
|
523
|
-
|
|
524
|
-
# **kwargs
|
|
525
|
-
if func_node.args.kwarg:
|
|
526
|
-
args_parts.append(f"**{self._format_arg(func_node.args.kwarg)}")
|
|
527
|
-
|
|
528
|
-
args_str = ", ".join(args_parts)
|
|
529
|
-
|
|
530
|
-
# Return type annotation
|
|
531
|
-
return_annotation = ""
|
|
532
|
-
if func_node.returns:
|
|
533
|
-
try:
|
|
534
|
-
return_annotation = f" -> {ast.unparse(func_node.returns)}"
|
|
535
|
-
except Exception:
|
|
536
|
-
return_annotation = " -> ..."
|
|
537
|
-
|
|
538
|
-
return f"{prefix} {name}({args_str}){return_annotation}"
|
|
539
|
-
|
|
540
|
-
def _format_arg(self, arg: ast.arg) -> str:
|
|
541
|
-
"""Format a function argument with optional type annotation."""
|
|
542
|
-
if arg.annotation:
|
|
543
|
-
try:
|
|
544
|
-
return f"{arg.arg}: {ast.unparse(arg.annotation)}"
|
|
545
|
-
except Exception:
|
|
546
|
-
return f"{arg.arg}: ..."
|
|
547
|
-
return arg.arg
|
|
548
|
-
|
|
549
|
-
async def _generate_project_structure(self, max_depth: int = 3) -> str | None:
    """
    Generate a tree view of the project structure.

    Primary: Uses gitingest (works with any language, respects .gitignore)
    Fallback: Custom tree builder using pathlib

    This provides context to help the LLM understand where files should
    be placed, preventing hallucinated paths like 'gt/core/file.py'.

    The output (when non-None) is a markdown document with a
    "## Project Structure" section and, when available, a
    "## File Placement Guidance" section appended.

    Args:
        max_depth: Maximum depth for fallback tree builder

    Returns:
        Tree view string with file placement guidance, or None if failed.
    """
    root = find_project_root()
    if not root:
        return None

    tree = None

    # Primary: Try gitingest (use async version since we're in async context)
    try:
        # Import gitingest (this will hijack logging via configure_logging())
        import logging as _logging

        from loguru import logger as loguru_logger

        # Capture existing loguru sink IDs before gitingest adds its own
        # loguru._core.core.handlers is a dict mapping sink_id -> handler
        # NOTE(review): `_core.handlers` is a private loguru API and may
        # change across loguru versions; the hasattr guards cover that.
        existing_sink_ids: set[int] = set()
        if hasattr(loguru_logger, "_core") and hasattr(loguru_logger._core, "handlers"):
            existing_sink_ids = set(loguru_logger._core.handlers.keys())

        # Importing gitingest is what triggers its logging configuration,
        # so this import MUST happen after the snapshot above.
        from gitingest import ingest_async

        # Undo gitingest's logging hijack:
        # gitingest installs an InterceptHandler on Python's root logger and adds
        # a loguru stderr sink. This causes ALL standard library logging (including
        # gobby.tasks.expansion) to be routed through loguru's formatted output.
        # 1. Remove the InterceptHandler from root logger
        _root = _logging.getLogger()
        _root.handlers = [
            h for h in _root.handlers if h.__class__.__name__ != "InterceptHandler"
        ]
        # 2. Remove only the loguru sinks added by gitingest (not pre-existing ones)
        if hasattr(loguru_logger, "_core") and hasattr(loguru_logger._core, "handlers"):
            current_sink_ids = set(loguru_logger._core.handlers.keys())
            new_sink_ids = current_sink_ids - existing_sink_ids
            for sink_id in new_sink_ids:
                try:
                    loguru_logger.remove(sink_id)
                except ValueError:
                    pass  # Sink already removed

        # Now run gitingest (it won't reconfigure logging on subsequent calls)
        # Silence gitingest's own loguru records for the duration of the call.
        loguru_logger.disable("gitingest")
        try:
            _summary, tree, _content = await ingest_async(str(root))
        finally:
            loguru_logger.enable("gitingest")
    except ImportError:
        # Expected when the optional gitingest dependency is absent.
        # Must be caught before the generic Exception handler below.
        logger.debug("gitingest not installed, using fallback tree builder")
    except Exception as e:
        # Any other gitingest failure is non-fatal: fall through to the
        # pathlib-based tree builder.
        logger.debug(f"gitingest failed ({e}), using fallback tree builder")

    # Fallback: Custom tree builder (also covers an empty tree from gitingest)
    if not tree:
        tree = self._build_tree_fallback(root, max_depth)

    if not tree:
        return None

    lines = ["## Project Structure", "", tree]

    # Add file placement guidance based on common patterns
    guidance = self._get_file_placement_guidance(root)
    if guidance:
        lines.append("")
        lines.append("## File Placement Guidance")
        lines.append(guidance)

    return "\n".join(lines)
def _build_tree_fallback(self, root: Path, max_depth: int = 3) -> str | None:
    """
    Build a project tree by walking the filesystem with pathlib.

    Used when gitingest is not available; only conventional source
    and test directories are rendered.

    Args:
        root: Project root path
        max_depth: Maximum depth to traverse

    Returns:
        Tree string or None
    """
    collected: list[str] = []

    # Conventional top-level directories worth showing to the model.
    for candidate in ["src", "lib", "app", "tests"]:
        candidate_path = root / candidate
        if candidate_path.exists() and candidate_path.is_dir():
            self._build_tree_recursive(candidate_path, root, collected, max_depth=max_depth)

    if not collected:
        return None
    return "\n".join(collected)
def _build_tree_recursive(
    self,
    path: Path,
    root: Path,
    lines: list[str],
    prefix: str = "",
    max_depth: int = 3,
    current_depth: int = 0,
) -> None:
    """Recursively build tree lines for a directory.

    Appends one line per directory to ``lines`` (mutated in place).
    Only subdirectories are rendered and recursed into — files are
    intentionally omitted from the tree.

    Args:
        path: Directory being rendered on this call.
        root: Project root; each entry is shown relative to it.
        lines: Output accumulator, mutated in place.
        prefix: Indentation string inherited from the parent level.
        max_depth: Depth at which recursion stops.
        current_depth: Depth of ``path`` relative to the initial call.
    """
    if current_depth > max_depth:
        return

    # Build artifacts, caches, and vendored deps excluded from the tree.
    skip_dirs = {
        "__pycache__",
        ".git",
        ".venv",
        "venv",
        "node_modules",
        ".pytest_cache",
        ".mypy_cache",
        "htmlcov",
        "dist",
        "build",
        ".egg-info",
    }

    # Each entry shows its full path relative to the project root.
    rel_path = path.relative_to(root)
    lines.append(f"{prefix}{rel_path}/")

    try:
        # Directories first, then case-insensitive name order.
        children = sorted(path.iterdir(), key=lambda p: (not p.is_dir(), p.name.lower()))
    except PermissionError:
        # Unreadable directory: keep its own entry, skip its children.
        return

    dirs = [c for c in children if c.is_dir() and c.name not in skip_dirs]

    for i, child in enumerate(dirs):
        is_last = i == len(dirs) - 1
        # Last sibling gets blank padding; others keep the vertical rule.
        child_prefix = prefix + (" " if is_last else "│ ")
        self._build_tree_recursive(
            child,
            root,
            lines,
            prefix=child_prefix,
            max_depth=max_depth,
            current_depth=current_depth + 1,
        )
def _get_file_placement_guidance(self, root: Path) -> str:
|
|
707
|
-
"""
|
|
708
|
-
Extract file placement guidance from CLAUDE.md or provide defaults.
|
|
709
|
-
|
|
710
|
-
Returns guidance string for common file types.
|
|
711
|
-
"""
|
|
712
|
-
guidance_lines = []
|
|
713
|
-
|
|
714
|
-
# Check for CLAUDE.md
|
|
715
|
-
claude_md = root / "CLAUDE.md"
|
|
716
|
-
if claude_md.exists():
|
|
717
|
-
try:
|
|
718
|
-
content = claude_md.read_text(encoding="utf-8")
|
|
719
|
-
# Look for architecture or file placement sections
|
|
720
|
-
if "src/gobby" in content:
|
|
721
|
-
# This is a Gobby project - provide specific guidance
|
|
722
|
-
guidance_lines.extend(
|
|
723
|
-
[
|
|
724
|
-
"- Task-related code: `src/gobby/tasks/`",
|
|
725
|
-
"- Workflow actions: `src/gobby/workflows/`",
|
|
726
|
-
"- MCP tools: `src/gobby/mcp_proxy/tools/`",
|
|
727
|
-
"- CLI commands: `src/gobby/cli/`",
|
|
728
|
-
"- Storage/DB: `src/gobby/storage/`",
|
|
729
|
-
"- Configuration: `src/gobby/config/`",
|
|
730
|
-
"- Tests mirror source: `tests/tasks/`, `tests/workflows/`, etc.",
|
|
731
|
-
]
|
|
732
|
-
)
|
|
733
|
-
except Exception:
|
|
734
|
-
pass # nosec B110 - CLAUDE.md parsing is optional
|
|
735
|
-
|
|
736
|
-
# Default guidance if CLAUDE.md doesn't provide specific info
|
|
737
|
-
if not guidance_lines:
|
|
738
|
-
# Detect common patterns
|
|
739
|
-
if (root / "src").exists():
|
|
740
|
-
pkg_dirs = [d.name for d in (root / "src").iterdir() if d.is_dir()]
|
|
741
|
-
if pkg_dirs:
|
|
742
|
-
pkg = pkg_dirs[0] # Usually the main package
|
|
743
|
-
guidance_lines.append(f"- Source code goes in `src/{pkg}/`")
|
|
744
|
-
if (root / "tests").exists():
|
|
745
|
-
guidance_lines.append("- Tests go in `tests/` mirroring source structure")
|
|
746
|
-
|
|
747
|
-
return "\n".join(guidance_lines)
|