@pennyfarthing/core 7.8.1 → 7.8.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +2 -1
- package/pennyfarthing-dist/scripts/core/prime.sh +8 -0
- package/pennyfarthing_scripts/__init__.py +17 -0
- package/pennyfarthing_scripts/__pycache__/__init__.cpython-311.pyc +0 -0
- package/pennyfarthing_scripts/__pycache__/__init__.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/__pycache__/config.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/__pycache__/jira.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/__pycache__/jira_epic_creation.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/__pycache__/jira_sync.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/__pycache__/jira_sync_story.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/__pycache__/sprint.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/__pycache__/workflow.cpython-311.pyc +0 -0
- package/pennyfarthing_scripts/__pycache__/workflow.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/bellmode_hook.py +154 -0
- package/pennyfarthing_scripts/brownfield/__init__.py +35 -0
- package/pennyfarthing_scripts/brownfield/__main__.py +7 -0
- package/pennyfarthing_scripts/brownfield/__pycache__/__init__.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/brownfield/__pycache__/__main__.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/brownfield/__pycache__/cli.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/brownfield/__pycache__/discover.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/brownfield/cli.py +131 -0
- package/pennyfarthing_scripts/brownfield/discover.py +753 -0
- package/pennyfarthing_scripts/common/__init__.py +49 -0
- package/pennyfarthing_scripts/common/__pycache__/__init__.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/common/__pycache__/config.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/common/__pycache__/output.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/common/config.py +65 -0
- package/pennyfarthing_scripts/common/output.py +180 -0
- package/pennyfarthing_scripts/config.py +21 -0
- package/pennyfarthing_scripts/git/__init__.py +29 -0
- package/pennyfarthing_scripts/git/__pycache__/__init__.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/git/__pycache__/create_branches.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/git/__pycache__/status_all.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/git/create_branches.py +439 -0
- package/pennyfarthing_scripts/git/status_all.py +310 -0
- package/pennyfarthing_scripts/hooks.py +455 -0
- package/pennyfarthing_scripts/jira/__init__.py +93 -0
- package/pennyfarthing_scripts/jira/__main__.py +10 -0
- package/pennyfarthing_scripts/jira/__pycache__/__init__.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/jira/__pycache__/__main__.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/jira/__pycache__/bidirectional.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/jira/__pycache__/claim.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/jira/__pycache__/cli.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/jira/__pycache__/client.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/jira/__pycache__/compat.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/jira/__pycache__/epic.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/jira/__pycache__/mappings.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/jira/__pycache__/models.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/jira/__pycache__/story.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/jira/__pycache__/sync.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/jira/bidirectional.py +561 -0
- package/pennyfarthing_scripts/jira/claim.py +211 -0
- package/pennyfarthing_scripts/jira/cli.py +150 -0
- package/pennyfarthing_scripts/jira/client.py +613 -0
- package/pennyfarthing_scripts/jira/epic.py +176 -0
- package/pennyfarthing_scripts/jira/story.py +219 -0
- package/pennyfarthing_scripts/jira/sync.py +350 -0
- package/pennyfarthing_scripts/jira_bidirectional_sync.py +37 -0
- package/pennyfarthing_scripts/jira_epic_creation.py +30 -0
- package/pennyfarthing_scripts/jira_sync.py +36 -0
- package/pennyfarthing_scripts/jira_sync_story.py +30 -0
- package/pennyfarthing_scripts/output.py +37 -0
- package/pennyfarthing_scripts/preflight/__init__.py +17 -0
- package/pennyfarthing_scripts/preflight/__main__.py +10 -0
- package/pennyfarthing_scripts/preflight/__pycache__/__init__.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/preflight/__pycache__/__main__.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/preflight/__pycache__/cli.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/preflight/__pycache__/finish.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/preflight/cli.py +141 -0
- package/pennyfarthing_scripts/preflight/finish.py +382 -0
- package/pennyfarthing_scripts/pretooluse_hook.py +142 -0
- package/pennyfarthing_scripts/prime/__init__.py +38 -0
- package/pennyfarthing_scripts/prime/__main__.py +8 -0
- package/pennyfarthing_scripts/prime/__pycache__/__init__.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/prime/__pycache__/__main__.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/prime/__pycache__/cli.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/prime/__pycache__/loader.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/prime/__pycache__/models.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/prime/__pycache__/persona.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/prime/__pycache__/session.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/prime/__pycache__/workflow.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/prime/cli.py +220 -0
- package/pennyfarthing_scripts/prime/loader.py +239 -0
- package/pennyfarthing_scripts/sprint/__init__.py +66 -0
- package/pennyfarthing_scripts/sprint/__main__.py +10 -0
- package/pennyfarthing_scripts/sprint/__pycache__/__init__.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/sprint/__pycache__/__main__.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/sprint/__pycache__/archive.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/sprint/__pycache__/cli.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/sprint/__pycache__/loader.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/sprint/__pycache__/status.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/sprint/__pycache__/validator.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/sprint/__pycache__/work.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/sprint/archive.py +108 -0
- package/pennyfarthing_scripts/sprint/cli.py +124 -0
- package/pennyfarthing_scripts/sprint/loader.py +193 -0
- package/pennyfarthing_scripts/sprint/status.py +122 -0
- package/pennyfarthing_scripts/sprint/validator.py +405 -0
- package/pennyfarthing_scripts/sprint/work.py +192 -0
- package/pennyfarthing_scripts/story/__init__.py +67 -0
- package/pennyfarthing_scripts/story/__main__.py +10 -0
- package/pennyfarthing_scripts/story/cli.py +105 -0
- package/pennyfarthing_scripts/story/create.py +167 -0
- package/pennyfarthing_scripts/story/size.py +113 -0
- package/pennyfarthing_scripts/story/template.py +151 -0
- package/pennyfarthing_scripts/swebench.py +216 -0
- package/pennyfarthing_scripts/tests/__init__.py +1 -0
- package/pennyfarthing_scripts/tests/__pycache__/__init__.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/tests/__pycache__/conftest.cpython-314-pytest-9.0.2.pyc +0 -0
- package/pennyfarthing_scripts/tests/__pycache__/test_brownfield.cpython-314-pytest-9.0.2.pyc +0 -0
- package/pennyfarthing_scripts/tests/__pycache__/test_git_utils.cpython-314-pytest-9.0.2.pyc +0 -0
- package/pennyfarthing_scripts/tests/__pycache__/test_prime.cpython-314-pytest-9.0.2.pyc +0 -0
- package/pennyfarthing_scripts/tests/__pycache__/test_sprint_validator.cpython-314-pytest-9.0.2.pyc +0 -0
- package/pennyfarthing_scripts/tests/conftest.py +106 -0
- package/pennyfarthing_scripts/tests/test_brownfield.py +842 -0
- package/pennyfarthing_scripts/tests/test_cli_modules.py +245 -0
- package/pennyfarthing_scripts/tests/test_common.py +180 -0
- package/pennyfarthing_scripts/tests/test_git_utils.py +866 -0
- package/pennyfarthing_scripts/tests/test_jira_package.py +334 -0
- package/pennyfarthing_scripts/tests/test_package_structure.py +372 -0
- package/pennyfarthing_scripts/tests/test_prime.py +397 -0
- package/pennyfarthing_scripts/tests/test_sprint_package.py +236 -0
- package/pennyfarthing_scripts/tests/test_sprint_validator.py +675 -0
- package/pennyfarthing_scripts/tests/test_story_package.py +156 -0
- package/pennyfarthing_scripts/welcome_hook.py +157 -0
- package/pennyfarthing_scripts/workflow.py +183 -0
|
@@ -0,0 +1,753 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Brownfield discovery - Analyze existing codebases.
|
|
3
|
+
|
|
4
|
+
Scans codebases and generates AI-ready documentation matching
|
|
5
|
+
_bmad-output format.
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
import asyncio
|
|
9
|
+
import json
|
|
10
|
+
import re
|
|
11
|
+
from dataclasses import dataclass, field
|
|
12
|
+
from enum import Enum
|
|
13
|
+
from pathlib import Path
|
|
14
|
+
from typing import Literal
|
|
15
|
+
|
|
16
|
+
# Try to import tomllib (Python 3.11+) or fall back to tomli
|
|
17
|
+
try:
|
|
18
|
+
import tomllib
|
|
19
|
+
except ImportError:
|
|
20
|
+
try:
|
|
21
|
+
import tomli as tomllib # type: ignore
|
|
22
|
+
except ImportError:
|
|
23
|
+
tomllib = None # type: ignore
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
class DepthLevel(Enum):
    """Discovery depth levels.

    Ordered from cheapest to most thorough scan; consumers compare
    against these members to decide how much of the tree to inspect.
    """
    QUICK = "quick"  # Surface scan - just package files
    STANDARD = "standard"  # Typical scan - package + key dirs
    DEEP = "deep"  # Comprehensive - full directory tree
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
class ProjectType(Enum):
    """Detected project types."""
    MONOREPO = "monorepo"  # Workspace/monorepo tooling detected
    SINGLE_PACKAGE = "single_package"  # Exactly one language manifest at root
    MULTI_LANGUAGE = "multi_language"  # Manifests for more than one language
    UNKNOWN = "unknown"  # No recognizable manifest found
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
@dataclass
class TechStackItem:
    """A detected technology in the stack."""
    # Package or technology name as it appears in the manifest.
    name: str
    # Version string from the manifest; None when not declared.
    version: str | None = None
    category: str = "unknown"  # runtime, dev, test, build, etc.
|
|
47
|
+
|
|
48
|
+
|
|
49
|
+
@dataclass
class DirectoryNode:
    """A node in the directory tree."""
    # Filesystem path of this entry.
    path: Path
    # Display name (basename; falls back to the full path for roots).
    name: str
    # True when this entry is a directory.
    is_dir: bool
    # Child nodes; empty for files and for unscanned directories.
    children: list["DirectoryNode"] = field(default_factory=list)
    annotation: str = ""  # Description of directory purpose
|
|
57
|
+
|
|
58
|
+
|
|
59
|
+
@dataclass
class ArchitecturePattern:
    """A detected architecture pattern."""
    # Short pattern identifier, e.g. "monorepo" or "mvc".
    name: str
    # Human-readable description of the pattern.
    description: str
    # Observations that support the detection (one sentence each).
    evidence: list[str] = field(default_factory=list)
|
|
65
|
+
|
|
66
|
+
|
|
67
|
+
@dataclass
class DiscoveryResult:
    """Complete brownfield discovery result.

    Aggregates everything the discovery passes produce for one project
    root; ``error`` is populated instead of raising when discovery fails.
    """
    # Root path that was analyzed.
    project_path: Path
    # Project classification (monorepo, single package, ...).
    project_type: ProjectType
    # Human-readable project name.
    project_name: str
    # Project version, if a manifest declared one.
    version: str | None
    # Detected technologies/dependencies.
    tech_stack: list[TechStackItem] = field(default_factory=list)
    # Root of the scanned directory tree; None when not scanned.
    directory_tree: DirectoryNode | None = None
    # Detected architecture patterns.
    patterns: list[ArchitecturePattern] = field(default_factory=list)
    # Error message when discovery failed; None on success.
    error: str | None = None

    @property
    def success(self) -> bool:
        """Return True if discovery succeeded."""
        return self.error is None
|
|
83
|
+
|
|
84
|
+
|
|
85
|
+
# Directories to always exclude from scanning (VCS internals, caches,
# virtualenvs, build output, editor state).
# NOTE(review): membership tests against this set compare exact names,
# so the glob-style entry "*.egg-info" only matches a directory literally
# named "*.egg-info" — confirm whether egg-info directories should be
# excluded by suffix instead.
EXCLUDED_DIRS = {
    "node_modules", ".git", "__pycache__", ".venv", "venv",
    ".pytest_cache", ".mypy_cache", ".tox", "dist", "build",
    ".eggs", "*.egg-info", ".cache", ".idea", ".vscode",
}
|
|
91
|
+
|
|
92
|
+
# Common directory annotations: maps a lower-cased directory name to a
# short human-readable purpose. Consumers look names up case-insensitively
# via DIR_ANNOTATIONS.get(name.lower(), "").
DIR_ANNOTATIONS = {
    # Code layout
    "src": "Source code",
    "lib": "Library code",
    # Testing
    "test": "Test files",
    "tests": "Test files",
    "spec": "Test specifications",
    # Documentation
    "docs": "Documentation",
    "doc": "Documentation",
    # Tooling
    "scripts": "Utility scripts",
    "bin": "Executable scripts",
    "config": "Configuration files",
    "configs": "Configuration files",
    # Assets
    "public": "Public assets",
    "static": "Static assets",
    "assets": "Asset files",
    "images": "Image files",
    "styles": "Stylesheets",
    "css": "CSS stylesheets",
    # Front-end structure
    "components": "UI components",
    "pages": "Page components",
    "views": "View templates",
    "templates": "Template files",
    # Back-end / layered structure
    "models": "Data models",
    "controllers": "Controller logic",
    "services": "Service layer",
    "repositories": "Data repositories",
    "api": "API endpoints",
    # Shared code
    "utils": "Utility functions",
    "helpers": "Helper functions",
    "types": "Type definitions",
    "interfaces": "Interface definitions",
    # Workspace layout
    "packages": "Workspace packages",
    "apps": "Application packages",
    "tools": "Development tools",
    # Test support
    "fixtures": "Test fixtures",
    "mocks": "Mock implementations",
    "stubs": "Stub implementations",
}
|
|
131
|
+
|
|
132
|
+
|
|
133
|
+
def _parse_json_safe(path: Path) -> dict | None:
|
|
134
|
+
"""Parse JSON file safely, returning None on error."""
|
|
135
|
+
try:
|
|
136
|
+
return json.loads(path.read_text(encoding="utf-8"))
|
|
137
|
+
except (json.JSONDecodeError, OSError, UnicodeDecodeError):
|
|
138
|
+
return None
|
|
139
|
+
|
|
140
|
+
|
|
141
|
+
def _parse_toml_safe(path: Path) -> dict | None:
|
|
142
|
+
"""Parse TOML file safely, returning None on error."""
|
|
143
|
+
if tomllib is None:
|
|
144
|
+
return None
|
|
145
|
+
try:
|
|
146
|
+
return tomllib.loads(path.read_text(encoding="utf-8"))
|
|
147
|
+
except (OSError, UnicodeDecodeError):
|
|
148
|
+
return None
|
|
149
|
+
except Exception:
|
|
150
|
+
# tomllib.TOMLDecodeError or similar
|
|
151
|
+
return None
|
|
152
|
+
|
|
153
|
+
|
|
154
|
+
async def detect_project_type(path: Path) -> ProjectType:
    """Detect if project is monorepo, single package, etc.

    Args:
        path: Root path to analyze

    Returns:
        Detected ProjectType
    """
    if not (path.exists() and path.is_dir()):
        return ProjectType.UNKNOWN

    package_json = path / "package.json"

    # Monorepo indicators: pnpm/lerna workspace manifests, or a
    # "workspaces" field in package.json (npm/yarn workspaces).
    if (path / "pnpm-workspace.yaml").exists() or (path / "lerna.json").exists():
        return ProjectType.MONOREPO
    if package_json.exists():
        pkg = _parse_json_safe(package_json)
        if pkg and "workspaces" in pkg:
            return ProjectType.MONOREPO

    # Tally which language ecosystems have a manifest at the root.
    manifest_flags = [
        package_json.exists(),  # node
        (path / "pyproject.toml").exists() or (path / "setup.py").exists(),  # python
        (path / "go.mod").exists(),  # go
        (path / "Cargo.toml").exists(),  # rust
        (path / "pom.xml").exists() or (path / "build.gradle").exists(),  # java
    ]
    language_count = sum(manifest_flags)

    if language_count > 1:
        return ProjectType.MULTI_LANGUAGE
    if language_count == 1:
        return ProjectType.SINGLE_PACKAGE
    return ProjectType.UNKNOWN
|
|
203
|
+
|
|
204
|
+
|
|
205
|
+
async def detect_tech_stack(
    path: Path,
    depth: DepthLevel = DepthLevel.STANDARD
) -> list[TechStackItem]:
    """Detect technology stack from manifest files.

    Inspects package.json, pyproject.toml, go.mod and Cargo.toml at the
    project root; with DEEP depth it also recurses one level into
    ``packages/`` for workspace members (at QUICK depth, so recursion
    does not cascade).

    Args:
        path: Root path to analyze
        depth: How deep to search

    Returns:
        List of detected technologies, in manifest order (Node, Python,
        Go, Rust, then workspace packages). May contain duplicates when
        the same dependency appears in several manifests.
    """
    if not path.exists() or not path.is_dir():
        return []

    items: list[TechStackItem] = []

    # Parse package.json (Node.js)
    package_json = path / "package.json"
    if package_json.exists():
        data = _parse_json_safe(package_json)
        if data:
            # Runtime dependencies
            for name, version in data.get("dependencies", {}).items():
                items.append(TechStackItem(name, version, "runtime"))
            # Dev dependencies
            for name, version in data.get("devDependencies", {}).items():
                items.append(TechStackItem(name, version, "dev"))

    # Parse pyproject.toml (Python)
    pyproject = path / "pyproject.toml"
    if pyproject.exists():
        data = _parse_toml_safe(pyproject)
        if data:
            project = data.get("project", {})
            # Main dependencies
            for dep in project.get("dependencies", []):
                name, version = _parse_python_dep(dep)
                items.append(TechStackItem(name, version, "runtime"))
            # Optional/dev dependencies: dev-like extras are tagged "dev",
            # everything else "optional".
            for group, deps in project.get("optional-dependencies", {}).items():
                category = "dev" if group in ("dev", "test", "development") else "optional"
                for dep in deps:
                    name, version = _parse_python_dep(dep)
                    items.append(TechStackItem(name, version, category))

    # Parse go.mod (Go) — plain-text format, parsed with regexes.
    go_mod = path / "go.mod"
    if go_mod.exists():
        try:
            content = go_mod.read_text(encoding="utf-8")
            # Extract Go version from the "go 1.xx" directive.
            go_match = re.search(r"^go\s+(\d+\.\d+)", content, re.MULTILINE)
            if go_match:
                items.append(TechStackItem("go", go_match.group(1), "runtime"))
            # Extract require lines ("module/path v1.2.3").
            for match in re.finditer(r"^\s*(\S+)\s+v?([\d.]+)", content, re.MULTILINE):
                module = match.group(1)
                version = match.group(2)
                if "/" in module:  # It's a dependency, not the module declaration
                    # Keep only the last path segment as the display name.
                    name = module.split("/")[-1]
                    items.append(TechStackItem(name, version, "runtime"))
        except OSError:
            pass

    # Parse Cargo.toml (Rust)
    cargo_toml = path / "Cargo.toml"
    if cargo_toml.exists():
        data = _parse_toml_safe(cargo_toml)
        if data:
            # A parseable Cargo.toml implies a Rust toolchain.
            items.append(TechStackItem("rust", None, "runtime"))
            for name, dep_info in data.get("dependencies", {}).items():
                # Cargo deps are either "1.0" strings or {version = "..."} tables.
                if isinstance(dep_info, str):
                    version = dep_info
                elif isinstance(dep_info, dict):
                    version = dep_info.get("version")
                else:
                    version = None
                items.append(TechStackItem(name, version, "runtime"))

    # Deep scan: look in subdirectories for workspace packages
    if depth == DepthLevel.DEEP:
        packages_dir = path / "packages"
        if packages_dir.exists() and packages_dir.is_dir():
            for subdir in packages_dir.iterdir():
                if subdir.is_dir():
                    sub_items = await detect_tech_stack(subdir, DepthLevel.QUICK)
                    items.extend(sub_items)

    return items
|
|
296
|
+
|
|
297
|
+
|
|
298
|
+
def _parse_python_dep(dep: str) -> tuple[str, str | None]:
|
|
299
|
+
"""Parse a Python dependency string like 'requests>=2.28.0'."""
|
|
300
|
+
# Match: name[extras]>=version or name>=version or just name
|
|
301
|
+
match = re.match(r"^([a-zA-Z0-9_-]+)(?:\[.*\])?(?:([><=!~]+)(.*))?$", dep.strip())
|
|
302
|
+
if match:
|
|
303
|
+
name = match.group(1)
|
|
304
|
+
version = match.group(3) if match.group(3) else None
|
|
305
|
+
return name, version
|
|
306
|
+
return dep, None
|
|
307
|
+
|
|
308
|
+
|
|
309
|
+
async def scan_directory_structure(
    path: Path,
    depth: DepthLevel = DepthLevel.STANDARD,
    max_depth: int = 5,
    _current_depth: int = 0,
    _visited: set[Path] | None = None
) -> DirectoryNode:
    """Scan directory structure with annotations.

    Args:
        path: Root path to scan
        depth: Discovery depth level (threaded through the recursion; not
            otherwise consulted in this function)
        max_depth: Maximum directory depth to traverse
        _current_depth: Internal - current recursion depth
        _visited: Internal - visited paths (for symlink loop detection)

    Returns:
        Root DirectoryNode with children
    """
    if _visited is None:
        _visited = set()

    # Resolve to real path for symlink detection
    try:
        real_path = path.resolve()
    except OSError:
        # Resolution can fail (e.g. dangling symlink); fall back to the
        # unresolved path so scanning continues.
        real_path = path

    # Check for symlink loops: seeing the same real path twice means a
    # symlink cycle — return a marker leaf instead of recursing forever.
    if real_path in _visited:
        return DirectoryNode(
            path=path,
            name=path.name or str(path),  # str(path) covers roots like "/"
            is_dir=True,
            annotation="(symlink loop)",
        )
    _visited.add(real_path)

    node = DirectoryNode(
        path=path,
        name=path.name or str(path),
        is_dir=path.is_dir() if path.exists() else False,
        annotation=DIR_ANNOTATIONS.get(path.name.lower(), ""),
    )

    # Leaf cases: files, nonexistent paths, or depth budget exhausted.
    if not path.is_dir() or _current_depth >= max_depth:
        return node

    # Get children
    children: list[DirectoryNode] = []
    try:
        entries = list(path.iterdir())
    except (PermissionError, OSError):
        # Unreadable directory: return it as a childless node.
        return node

    # Filter and sort entries: directories first, then case-insensitive name.
    for entry in sorted(entries, key=lambda e: (not e.is_dir(), e.name.lower())):
        # Skip excluded directories
        if entry.name in EXCLUDED_DIRS:
            continue
        # Skip hidden entries, except a few meaningful dot-directories.
        if entry.name.startswith(".") and entry.name not in (".github", ".claude"):
            continue

        if entry.is_dir():
            # Recursively scan subdirectories
            child = await scan_directory_structure(
                entry,
                depth,
                max_depth,
                _current_depth + 1,
                _visited,
            )
            children.append(child)
        elif entry.is_file():
            # Files become leaf nodes with no annotation.
            children.append(DirectoryNode(
                path=entry,
                name=entry.name,
                is_dir=False,
            ))

    node.children = children
    return node
|
|
391
|
+
|
|
392
|
+
|
|
393
|
+
async def detect_architecture_patterns(
    path: Path,
    depth: DepthLevel = DepthLevel.STANDARD
) -> list[ArchitecturePattern]:
    """Detect common architecture patterns.

    Args:
        path: Root path to analyze
        depth: Discovery depth level (currently unused — detection only
            inspects top-level directory names and root manifest files)

    Returns:
        List of detected patterns with evidence, in a fixed check order:
        monorepo, mvc, layered, typescript, source-separation.
    """
    if not path.exists() or not path.is_dir():
        return []

    patterns: list[ArchitecturePattern] = []

    # Lower-cased names of the immediate subdirectories drive all checks.
    try:
        dirs = {d.name.lower() for d in path.iterdir() if d.is_dir()}
    except (PermissionError, OSError):
        return []

    # Check for monorepo/workspace pattern
    if "packages" in dirs or "apps" in dirs:
        evidence = []
        if "packages" in dirs:
            evidence.append("packages/ directory present")
        if "apps" in dirs:
            evidence.append("apps/ directory present")
        if (path / "pnpm-workspace.yaml").exists():
            evidence.append("pnpm-workspace.yaml present")
        if (path / "package.json").exists():
            data = _parse_json_safe(path / "package.json")
            if data and "workspaces" in data:
                evidence.append("workspaces field in package.json")

        if evidence:
            patterns.append(ArchitecturePattern(
                "monorepo",
                "Monorepo with multiple packages/apps",
                evidence,
            ))

    # Check for MVC pattern — requires all three directories.
    mvc_dirs = {"models", "views", "controllers"}
    if mvc_dirs.issubset(dirs):
        patterns.append(ArchitecturePattern(
            "mvc",
            "Model-View-Controller architecture",
            [f"{d}/ directory present" for d in mvc_dirs],
        ))

    # Check for layered architecture — any two of three indicators suffice.
    layered_indicators = {"api", "services", "repositories"}
    matches = layered_indicators.intersection(dirs)
    if len(matches) >= 2:
        patterns.append(ArchitecturePattern(
            "layered",
            "Layered architecture with service separation",
            [f"{d}/ directory present" for d in matches],
        ))

    # Check for TypeScript pattern
    if (path / "tsconfig.json").exists():
        evidence = ["tsconfig.json present"]
        if (path / "src").is_dir():
            evidence.append("src/ directory present")
        patterns.append(ArchitecturePattern(
            "typescript",
            "TypeScript project with compilation",
            evidence,
        ))

    # Check for src/lib pattern — suppressed when the TypeScript pattern
    # already claimed src/ above.
    if "src" in dirs or "lib" in dirs:
        evidence = []
        if "src" in dirs:
            evidence.append("src/ directory present")
        if "lib" in dirs:
            evidence.append("lib/ directory present")
        if evidence and not any(p.name == "typescript" for p in patterns):
            patterns.append(ArchitecturePattern(
                "source-separation",
                "Source code separated into dedicated directory",
                evidence,
            ))

    return patterns
|
|
482
|
+
|
|
483
|
+
|
|
484
|
+
def generate_project_overview(result: DiscoveryResult) -> str:
    """Generate project-overview.md content.

    Produces an executive-summary table plus one section per detected
    architecture pattern.

    Args:
        result: Discovery result

    Returns:
        Markdown content for project overview
    """
    lines = [
        "# Project Overview",
        "",
        "## Executive Summary",
        "",
        f"**{result.project_name}** is a {result.project_type.value.replace('_', ' ')} project.",
        "",
        "| Attribute | Value |",
        "|-----------|-------|",
        f"| **Name** | {result.project_name} |",
        f"| **Version** | {result.version or 'N/A'} |",
        f"| **Type** | {result.project_type.value} |",
        f"| **Path** | {result.project_path} |",
        "",
    ]

    if result.patterns:
        lines.extend([
            "## Architecture Patterns",
            "",
        ])
        for pattern in result.patterns:
            lines.append(f"### {pattern.name.title()}")
            lines.append("")
            lines.append(pattern.description)
            lines.append("")
            if pattern.evidence:
                lines.append("**Evidence:**")
                for e in pattern.evidence:
                    lines.append(f"- {e}")
            # Blank line separating pattern sections.
            lines.append("")

    return "\n".join(lines)
|
|
526
|
+
|
|
527
|
+
|
|
528
|
+
def generate_tech_stack_doc(result: DiscoveryResult) -> str:
    """Generate technology-stack.md content.

    Args:
        result: Discovery result

    Returns:
        Markdown content for tech stack: one table per dependency
        category, packages sorted case-insensitively, or a placeholder
        line when nothing was detected.
    """
    lines = [
        "# Technology Stack",
        "",
    ]

    if not result.tech_stack:
        lines.append("No technologies detected.")
        return "\n".join(lines)

    # Group detected items by category (runtime/dev/optional/...).
    # setdefault replaces the original manual "if cat not in dict" dance.
    categories: dict[str, list[TechStackItem]] = {}
    for item in result.tech_stack:
        categories.setdefault(item.category, []).append(item)

    # One markdown table per category, categories in alphabetical order.
    for category, items in sorted(categories.items()):
        lines.append(f"## {category.title()} Dependencies")
        lines.append("")
        lines.append("| Package | Version |")
        lines.append("|---------|---------|")
        for item in sorted(items, key=lambda x: x.name.lower()):
            lines.append(f"| {item.name} | {item.version or 'N/A'} |")
        lines.append("")

    return "\n".join(lines)
|
|
564
|
+
|
|
565
|
+
|
|
566
|
+
def _format_tree(node: DirectoryNode, prefix: str = "", is_last: bool = True) -> list[str]:
    """Render *node* and its descendants as box-drawing tree lines."""
    branch = "└── " if is_last else "├── "
    note = f" # {node.annotation}" if node.annotation else ""
    rendered = [f"{prefix}{branch}{node.name}{note}"]

    if node.children:
        # Continue the vertical rule only while siblings remain below us.
        child_prefix = prefix + ("    " if is_last else "│   ")
        final = len(node.children) - 1
        for index, child in enumerate(node.children):
            rendered.extend(_format_tree(child, child_prefix, index == final))

    return rendered
|
|
583
|
+
|
|
584
|
+
|
|
585
|
+
def generate_source_tree_doc(result: DiscoveryResult) -> str:
    """Generate source-tree-analysis.md content.

    Args:
        result: Discovery result

    Returns:
        Markdown content for source tree
    """
    lines = [
        "# Source Tree Analysis",
        "",
        "## Directory Structure",
        "",
        "```",
        result.project_name,
    ]

    tree = result.directory_tree
    if tree and tree.children:
        final = len(tree.children) - 1
        for index, child in enumerate(tree.children):
            lines.extend(_format_tree(child, "", index == final))

    lines += ["```", ""]

    return "\n".join(lines)
|
|
612
|
+
|
|
613
|
+
|
|
614
|
+
def generate_ai_guidance_doc(result: DiscoveryResult) -> str:
    """Generate ai-guidance.md content.

    Args:
        result: Discovery result

    Returns:
        Markdown content for AI guidance
    """
    doc: list[str] = [
        "# AI Guidance",
        "",
        f"This document provides guidance for AI agents working on **{result.project_name}**.",
        "",
        "## Project Type",
        "",
        f"This is a **{result.project_type.value}** project.",
        "",
    ]

    if result.patterns:
        doc.append("## Detected Patterns")
        doc.append("")
        doc.extend(f"- **{pattern.name}**: {pattern.description}" for pattern in result.patterns)
        doc.append("")

    if result.tech_stack:
        # Deduplicate names case-insensitively and cap the summary at ten entries.
        tech_names = sorted({entry.name.lower() for entry in result.tech_stack})[:10]
        doc += [
            "## Key Technologies",
            "",
            f"The project uses: {', '.join(tech_names)}",
            "",
        ]

    doc += [
        "## Recommendations",
        "",
        "- Follow existing code patterns and conventions",
        "- Check for existing tests before modifying code",
        "- Review the tech stack documentation for version constraints",
        "",
    ]

    return "\n".join(doc)
|
|
663
|
+
|
|
664
|
+
|
|
665
|
+
async def discover(
    path: Path,
    depth: DepthLevel = DepthLevel.STANDARD,
    output_dir: Path | None = None
) -> DiscoveryResult:
    """Run complete brownfield discovery.

    Args:
        path: Root path to analyze
        depth: Discovery depth level (quick/standard/deep)
        output_dir: Optional output directory for generated docs

    Returns:
        DiscoveryResult with all findings
    """
    # Guard clauses: refuse anything that is not an existing directory.
    # Errors are reported via the result object rather than raised.
    error_message: str | None = None
    if not path.exists():
        error_message = f"Path does not exist: {path}"
    elif not path.is_dir():
        error_message = f"Path is not a directory: {path}"

    if error_message is not None:
        return DiscoveryResult(
            project_path=path,
            project_type=ProjectType.UNKNOWN,
            project_name=path.name,
            version=None,
            error=error_message,
        )

    # Project name/version: package.json wins; pyproject.toml is the
    # fallback only while no version has been found yet.
    project_name = path.name
    version: str | None = None

    package_json = path / "package.json"
    if package_json.exists():
        manifest = _parse_json_safe(package_json)
        if manifest:
            project_name = manifest.get("name", project_name)
            version = manifest.get("version")

    pyproject = path / "pyproject.toml"
    if version is None and pyproject.exists():
        toml_data = _parse_toml_safe(pyproject)
        if toml_data:
            project_table = toml_data.get("project", {})
            project_name = project_table.get("name", project_name)
            version = project_table.get("version")

    # All four detectors are independent, so run them concurrently.
    project_type, tech_stack, directory_tree, patterns = await asyncio.gather(
        detect_project_type(path),
        detect_tech_stack(path, depth),
        scan_directory_structure(path, depth),
        detect_architecture_patterns(path, depth),
    )

    result = DiscoveryResult(
        project_path=path,
        project_type=project_type,
        project_name=project_name,
        version=version,
        tech_stack=tech_stack,
        directory_tree=directory_tree,
        patterns=patterns,
    )

    # Persist the generated documents when an output directory was given.
    if output_dir:
        output_dir.mkdir(parents=True, exist_ok=True)
        documents = (
            ("project-overview.md", generate_project_overview),
            ("technology-stack.md", generate_tech_stack_doc),
            ("source-tree-analysis.md", generate_source_tree_doc),
            ("ai-guidance.md", generate_ai_guidance_doc),
        )
        for filename, render in documents:
            (output_dir / filename).write_text(render(result), encoding="utf-8")

    return result
|