devflow-cli 1.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,167 @@
1
+ """Gestion du manifeste de templates devflow (checksums + comparaison)."""
2
+
3
+ from __future__ import annotations
4
+
5
import hashlib
import json
import os
from dataclasses import dataclass
from datetime import datetime, timezone
from pathlib import Path

from devflow_cli import VERSION
12
+
13
+
14
@dataclass
class TemplateStatus:
    """Comparison result for a single template file."""
    path: str  # Relative path (e.g. "templates/spec-template.md")
    status: str  # "current" | "outdated" | "modified" | "new" | "unknown"
    source_hash: str | None = None  # Hash of the file in the devflow source repo
    installed_hash: str | None = None  # Hash of the file currently installed in claude_dir
    manifest_hash: str | None = None  # Hash recorded in the install manifest at install time
22
+
23
+
24
def compute_file_hash(file_path: Path) -> str:
    """Compute the SHA-256 digest of a file as "sha256:<hexdigest>".

    Reads the file in fixed-size chunks instead of read_bytes() so that
    large files are hashed without loading them entirely into memory.

    Args:
        file_path: Path of the file to hash.

    Returns:
        "sha256:" followed by the lowercase hex digest.
    """
    h = hashlib.sha256()
    with file_path.open("rb") as fh:
        # iter() with a sentinel stops cleanly at EOF (read returns b"").
        for chunk in iter(lambda: fh.read(65536), b""):
            h.update(chunk)
    return f"sha256:{h.hexdigest()}"
29
+
30
+
31
def compute_source_manifest(devflow_root: Path) -> dict:
    """Build the source-template manifest for the devflow repository.

    Walks the same categories as install_devflow():
    templates/, templates/export/, templates/checklists/,
    templates/contracts/, scripts/*.sh, scripts/*.ps1

    Returns:
        Dict with "devflow_version", "computed_at" (UTC ISO timestamp)
        and "templates" mapping relative paths to "sha256:..." digests.
    """
    hashes: dict[str, str] = {}

    def _collect(directory: Path, pattern: str, prefix: str) -> None:
        # Hash every regular file matching `pattern`, sorted for determinism.
        if directory.is_dir():
            for entry in sorted(directory.glob(pattern)):
                if entry.is_file():
                    hashes[f"{prefix}/{entry.name}"] = compute_file_hash(entry)

    templates_dir = devflow_root / "templates"

    # Root templates
    _collect(templates_dir, "*", "templates")

    # Sub-directories
    for sub in ("export", "checklists", "contracts"):
        _collect(templates_dir / sub, "*", f"templates/{sub}")

    # Scripts (shell first, then PowerShell — matches insertion order of the original)
    for pattern in ("*.sh", "*.ps1"):
        _collect(devflow_root / "scripts", pattern, "scripts")

    return {
        "devflow_version": VERSION,
        "computed_at": datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ"),
        "templates": hashes,
    }
71
+
72
+
73
+ def load_manifest(claude_dir: Path) -> dict | None:
74
+ """Charge le manifeste d'installation depuis claude_dir/.devflow-manifest.json.
75
+
76
+ Returns:
77
+ Le manifeste en dict, ou None si absent ou corrompu.
78
+ """
79
+ manifest_file = claude_dir / ".devflow-manifest.json"
80
+ if not manifest_file.is_file():
81
+ return None
82
+ try:
83
+ return json.loads(manifest_file.read_text(encoding="utf-8"))
84
+ except (json.JSONDecodeError, OSError) as e:
85
+ from devflow_cli.utils.console import warn
86
+ warn(f"Manifeste corrompu ou illisible : {manifest_file} ({type(e).__name__}: {e})")
87
+ return None
88
+
89
+
90
def save_manifest(claude_dir: Path, manifest_data: dict) -> None:
    """Write the install manifest to claude_dir/.devflow-manifest.json atomically.

    Writes to a temporary sibling file then renames it into place with
    os.replace(), so a crash mid-write cannot leave a truncated manifest
    behind (the previous version wrote in place).

    Args:
        claude_dir: Target directory (~/.claude/).
        manifest_data: Manifest content to serialize as pretty JSON.
    """
    manifest_file = claude_dir / ".devflow-manifest.json"
    tmp_file = manifest_file.with_name(manifest_file.name + ".tmp")
    tmp_file.write_text(
        json.dumps(manifest_data, indent=2, ensure_ascii=False) + "\n",
        encoding="utf-8",
    )
    # Atomic on POSIX and Windows: readers see either the old or the new file.
    os.replace(tmp_file, manifest_file)
97
+
98
+
99
def compare_manifests(
    source_manifest: dict,
    installed_manifest: dict | None,
    claude_dir: Path,
) -> list[TemplateStatus]:
    """Compare the source manifest with the current installation.

    Args:
        source_manifest: Manifest computed from the devflow repo.
        installed_manifest: Install-time manifest (or None for legacy installs).
        claude_dir: The ~/.claude/ directory.

    Returns:
        One TemplateStatus per source template, sorted by relative path.
    """
    statuses: list[TemplateStatus] = []
    recorded = (installed_manifest or {}).get("templates", {})
    has_manifest = installed_manifest is not None
    source_templates = source_manifest.get("templates", {})

    for rel_path in sorted(source_templates):
        src_hash = source_templates[rel_path]
        recorded_hash = recorded.get(rel_path)
        target = claude_dir / rel_path

        # Not installed at all → "new" (no hashes beyond the source one).
        if not target.is_file():
            statuses.append(TemplateStatus(
                path=rel_path,
                status="new",
                source_hash=src_hash,
            ))
            continue

        local_hash = compute_file_hash(target)

        if src_hash == local_hash:
            verdict = "current"
        elif has_manifest and recorded_hash is not None:
            # recorded == local → file untouched locally, source moved on;
            # otherwise the user edited the installed copy.
            verdict = "outdated" if recorded_hash == local_hash else "modified"
        else:
            # No manifest entry → cannot tell local edits from upstream changes.
            verdict = "unknown"

        # recorded_hash is always None on the "unknown" path, so passing it
        # through unconditionally matches the original per-branch behavior.
        statuses.append(TemplateStatus(
            path=rel_path,
            status=verdict,
            source_hash=src_hash,
            installed_hash=local_hash,
            manifest_hash=recorded_hash,
        ))

    return statuses
@@ -0,0 +1,136 @@
1
+ """Detection stack, arborescence, conventions pour generation de contexte."""
2
+
3
+ from __future__ import annotations
4
+
5
+ import os
6
+ from collections import Counter
7
+ from pathlib import Path
8
+
9
# Directory names skipped when walking a project tree (build artifacts,
# VCS metadata, dependency caches, editor/IDE state).
EXCLUDED_DIRS = {
    "node_modules", ".git", "build", "dist", "__pycache__",
    ".dart_tool", ".gradle", ".DS_Store", ".idea", ".vscode",
}

# (marker file, stack label) pairs: the presence of the marker file at the
# project root indicates the corresponding technology stack. Order matters —
# detect_stack() reports labels in this order.
STACK_MARKERS: list[tuple[str, str]] = [
    ("package.json", "Node.js"),
    ("angular.json", "Angular"),
    ("pom.xml", "Java/Maven"),
    ("build.gradle", "Java/Gradle"),
    ("pubspec.yaml", "Flutter/Dart"),
    ("pyproject.toml", "Python"),
    ("requirements.txt", "Python"),
    ("go.mod", "Go"),
    ("Cargo.toml", "Rust"),
    ("tsconfig.json", "TypeScript"),
]
26
+
27
+
28
def detect_stack(project_dir: Path) -> str:
    """Detect the project's technology stack from marker files.

    Returns a comma-separated list of stack labels, de-duplicated while
    preserving STACK_MARKERS order, or "Non detectee" when nothing matches.
    """
    found: list[str] = []
    for marker_file, label in STACK_MARKERS:
        marker_present = (project_dir / marker_file).is_file()
        # Two markers can map to the same label (e.g. Python) — add it once.
        if marker_present and label not in found:
            found.append(label)
    if not found:
        return "Non detectee"
    return ", ".join(found)
36
+
37
+
38
def _should_exclude(path: Path) -> bool:
    """Return True when any component of `path` is an excluded directory name."""
    for component in path.parts:
        if component in EXCLUDED_DIRS:
            return True
    return False
40
+
41
+
42
def build_tree(project_dir: Path, max_depth: int = 2, max_lines: int = 60) -> str:
    """Render the project tree (directories down to max_depth levels).

    Directories are listed up to max_depth; files only above max_depth.
    Output is capped at max_lines lines.
    """
    entries: list[str] = []
    root = project_dir.resolve()

    for current, subdirs, filenames in os.walk(root):
        rel = Path(current).relative_to(root)
        depth = len(rel.parts)

        if depth > max_depth:
            # Too deep: stop os.walk from descending any further here.
            subdirs.clear()
            continue

        # Prune excluded directories; sorting keeps traversal deterministic.
        subdirs[:] = sorted(name for name in subdirs if name not in EXCLUDED_DIRS)

        at_root = str(rel) == "."
        if not at_root:
            entries.append(f"{rel}/")

        if depth < max_depth:
            for name in sorted(filenames):
                if name == ".DS_Store":
                    continue
                entries.append(name if at_root else str(rel / name))

        if len(entries) >= max_lines:
            break

    return "\n".join(entries[:max_lines])
74
+
75
+
76
def detect_extensions(project_dir: Path) -> str:
    """Return the top 5 file extensions by occurrence count, one per line."""
    tally: Counter[str] = Counter()

    for current, subdirs, filenames in os.walk(project_dir):
        if _should_exclude(Path(current).relative_to(project_dir)):
            subdirs.clear()
            continue
        # Prune excluded directories before os.walk descends into them.
        subdirs[:] = [name for name in subdirs if name not in EXCLUDED_DIRS]
        tally.update(
            suffix for name in filenames if (suffix := Path(name).suffix)
        )

    if not tally:
        return "Aucune extension detectee"
    return "\n".join(f"- {ext} ({count} fichiers)" for ext, count in tally.most_common(5))
95
+
96
+
97
def detect_conventions(project_dir: Path) -> str:
    """Detect configured code conventions (formatters, linters, AI rules).

    Returns a "- item" bullet list, or a fallback message when nothing is found.
    """
    found: list[str] = []

    if (project_dir / ".editorconfig").is_file():
        found.append(".editorconfig")

    # (label, candidate config files): the label is added once if any candidate exists.
    tool_configs = [
        ("Prettier", (".prettierrc", ".prettierrc.json", ".prettierrc.js")),
        ("ESLint", (".eslintrc", ".eslintrc.json", ".eslintrc.js", "eslint.config.js")),
        ("Stylelint", (".stylelintrc", ".stylelintrc.json")),
    ]
    for label, candidates in tool_configs:
        if any((project_dir / name).is_file() for name in candidates):
            found.append(label)

    if (project_dir / "CLAUDE.md").is_file():
        found.append("CLAUDE.md")

    if (project_dir / ".cursor" / "rules").exists():
        found.append("Cursor Rules")

    if not found:
        return "Aucune convention specifique detectee"
    return "\n".join(f"- {c}" for c in found)
128
+
129
+
130
def read_constitution(project_dir: Path, max_lines: int = 20) -> str:
    """Return the first max_lines lines of the devflow constitution.

    Checks docs/constitution.md first, then constitution.md at the root;
    returns a fallback message when neither exists.
    """
    candidates = (
        project_dir / "docs" / "constitution.md",
        project_dir / "constitution.md",
    )
    for candidate in candidates:
        if not candidate.is_file():
            continue
        head = candidate.read_text(encoding="utf-8").splitlines()[:max_lines]
        return "\n".join(head)
    return "Aucune constitution devflow trouvee"
@@ -0,0 +1,135 @@
1
+ """Detection de la desynchronisation entre artefacts du pipeline."""
2
+
3
+ from __future__ import annotations
4
+
5
+ from dataclasses import dataclass
6
+ from pathlib import Path
7
+
8
+
9
# Linear dependency graph: (upstream, downstream) pairs — each downstream
# artifact must be regenerated whenever its upstream changes.
ARTIFACT_DEPS: list[tuple[str, str]] = [
    ("constitution", "spec.md"),
    ("spec.md", "plan.md"),
    ("plan.md", "tasks.md"),
]

# Maps a stale artifact to the speckit command that regenerates it.
REGEN_COMMANDS: dict[str, str] = {
    "plan.md": "/speckit.plan",
    "tasks.md": "/speckit.tasks",
}
21
+
22
+
23
@dataclass
class StalenessInfo:
    """Synchronization state of a single downstream artifact."""
    artifact: str  # Artifact file name (e.g. "plan.md")
    state: str  # "fresh", "stale", "missing"
    reason: str  # Human-readable explanation; empty when fresh
    upstream: str  # Upstream artifact this one depends on
    mtime: float | None  # Artifact mtime, or None when the file is absent
30
+
31
+
32
+ def _get_mtime(path: Path) -> float | None:
33
+ """Retourne le mtime d'un fichier ou None si absent."""
34
+ if path.is_file():
35
+ return path.stat().st_mtime
36
+ return None
37
+
38
+
39
def get_staleness(
    feature_dir: Path,
    constitution_path: Path | None = None,
) -> dict[str, StalenessInfo]:
    """Compute the synchronization state of each downstream artifact.

    Walks ARTIFACT_DEPS in pipeline order, comparing mtimes: a downstream
    artifact is "stale" when its upstream was modified after it, "missing"
    when the file is absent, "fresh" otherwise. Staleness propagates down
    the chain: once an artifact is stale, everything after it is stale too.

    Args:
        feature_dir: Directory holding the artifacts (specs/<branch>/ or docs/features/<id>/).
        constitution_path: Absolute path to the constitution (.specify/memory/constitution.md).

    Returns:
        Dict {artifact_name: StalenessInfo} for each downstream in ARTIFACT_DEPS.
    """
    result: dict[str, StalenessInfo] = {}
    stale_names: set[str] = set()

    for upstream_name, downstream_name in ARTIFACT_DEPS:
        # If the upstream is already stale, the downstream is stale by
        # propagation (cascade) — no mtime comparison needed.
        if upstream_name in stale_names:
            downstream_path = feature_dir / downstream_name
            downstream_mtime = _get_mtime(downstream_path)
            if downstream_mtime is not None:
                result[downstream_name] = StalenessInfo(
                    artifact=downstream_name,
                    state="stale",
                    reason=f"{upstream_name} est desuet (cascade)",
                    upstream=upstream_name,
                    mtime=downstream_mtime,
                )
                stale_names.add(downstream_name)
            else:
                result[downstream_name] = StalenessInfo(
                    artifact=downstream_name,
                    state="missing",
                    reason=f"{downstream_name} absent",
                    upstream=upstream_name,
                    mtime=None,
                )
            continue

        # Resolve the upstream path; the constitution lives outside feature_dir.
        if upstream_name == "constitution":
            if constitution_path is None or not constitution_path.is_file():
                continue
            upstream_mtime = constitution_path.stat().st_mtime
        else:
            upstream_path = feature_dir / upstream_name
            upstream_mtime = _get_mtime(upstream_path)
            if upstream_mtime is None:
                continue  # Upstream absent -> no comparison possible

        # Resolve the downstream path
        downstream_path = feature_dir / downstream_name
        downstream_mtime = _get_mtime(downstream_path)

        if downstream_mtime is None:
            result[downstream_name] = StalenessInfo(
                artifact=downstream_name,
                state="missing",
                reason=f"{downstream_name} absent",
                upstream=upstream_name,
                mtime=None,
            )
        elif upstream_mtime > downstream_mtime:
            result[downstream_name] = StalenessInfo(
                artifact=downstream_name,
                state="stale",
                reason=f"{upstream_name} modifie apres {downstream_name}",
                upstream=upstream_name,
                mtime=downstream_mtime,
            )
            stale_names.add(downstream_name)
        else:
            # Only record "fresh" when no earlier verdict exists for this
            # artifact (defensive guard — each downstream appears once in
            # the linear ARTIFACT_DEPS chain).
            if downstream_name not in result:
                result[downstream_name] = StalenessInfo(
                    artifact=downstream_name,
                    state="fresh",
                    reason="",
                    upstream=upstream_name,
                    mtime=downstream_mtime,
                )

    return result
122
+
123
+
124
def get_stale_artifacts(
    feature_dir: Path,
    constitution_path: Path | None = None,
) -> list[StalenessInfo]:
    """Return only the stale artifacts, ordered by pipeline position."""
    states = get_staleness(feature_dir, constitution_path)
    ordered: list[StalenessInfo] = []
    # Iterate the dependency chain so results follow pipeline order.
    for _, downstream in ARTIFACT_DEPS:
        info = states.get(downstream)
        if info is not None and info.state == "stale":
            ordered.append(info)
    return ordered
@@ -0,0 +1,69 @@
1
+ """Lecture/ecriture de state.json pour les features devflow."""
2
+
3
+ from __future__ import annotations
4
+
5
+ import json
6
+ from dataclasses import dataclass, field, asdict
7
+ from datetime import datetime, timezone
8
+ from pathlib import Path
9
+
10
+ from devflow_cli import STEPS
11
+
12
+
13
@dataclass
class FeatureState:
    """Pipeline state of one devflow feature, persisted as state.json.

    Field names are intentionally camelCase: they map 1:1 onto the JSON keys
    written by write_state() / read by read_state().
    """
    issueId: str  # Tracker issue identifier (e.g. "KS-123")
    currentStep: str = "spec"  # Name of the pipeline step currently in progress
    completedSteps: list[str] = field(default_factory=list)  # Steps finished so far
    stepTimestamps: dict[str, dict[str, str]] = field(default_factory=dict)  # presumably per-step timestamp records — confirm against writers
    reviewIterations: dict[str, int] = field(default_factory=dict)  # Review round count, keyed by step
    createdAt: str = ""  # UTC timestamp; filled in __post_init__ when empty
    updatedAt: str = ""  # UTC timestamp; filled in __post_init__ when empty

    def __post_init__(self) -> None:
        # Stamp both timestamps for a brand-new state; values loaded from an
        # existing state.json (non-empty strings) are left untouched.
        now = _now_iso()
        if not self.createdAt:
            self.createdAt = now
        if not self.updatedAt:
            self.updatedAt = now

    @property
    def completed_count(self) -> int:
        """Number of pipeline steps already completed."""
        return len(self.completedSteps)

    @property
    def total_steps(self) -> int:
        """Total number of steps in the devflow pipeline (from STEPS)."""
        return len(STEPS)

    def to_dict(self) -> dict:
        """Serialize to a plain, JSON-ready dict."""
        return asdict(self)
40
+
41
+
42
+ def _now_iso() -> str:
43
+ return datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")
44
+
45
+
46
def read_state(state_file: Path) -> FeatureState:
    """Read a state.json file into a FeatureState.

    Unknown keys in the JSON are silently dropped so older/newer state files
    remain loadable.
    """
    raw = json.loads(state_file.read_text(encoding="utf-8"))
    known = FeatureState.__dataclass_fields__
    filtered = {key: value for key, value in raw.items() if key in known}
    return FeatureState(**filtered)
53
+
54
+
55
def write_state(state_file: Path, state: FeatureState) -> None:
    """Serialize a FeatureState to state.json, bumping updatedAt first."""
    state.updatedAt = _now_iso()
    payload = json.dumps(state.to_dict(), indent=2, ensure_ascii=False)
    state_file.write_text(payload + "\n", encoding="utf-8")
62
+
63
+
64
def create_state(state_file: Path, issue_id: str) -> FeatureState:
    """Create and persist a fresh state.json for a feature."""
    new_state = FeatureState(issueId=issue_id)
    # Ensure the feature directory exists before writing.
    state_file.parent.mkdir(parents=True, exist_ok=True)
    write_state(state_file, new_state)
    return new_state
File without changes
@@ -0,0 +1,34 @@
1
+ """Rich console helpers."""
2
+
3
+ from rich.console import Console
4
+ from rich.table import Table
5
+
6
# Shared Rich console instance used by every helper in this module.
console = Console()
7
+
8
+
9
def ok(msg: str) -> None:
    """Print msg prefixed with a green [OK] tag."""
    tag = "[green][OK][/green] "
    console.print(tag + msg)
11
+
12
+
13
def warn(msg: str) -> None:
    """Print msg prefixed with a yellow [WARN] tag."""
    tag = "[yellow][WARN][/yellow] "
    console.print(tag + msg)
15
+
16
+
17
def fail(msg: str) -> None:
    """Print msg prefixed with a red [FAIL] tag."""
    tag = "[red][FAIL][/red] "
    console.print(tag + msg)
19
+
20
+
21
def info(msg: str) -> None:
    """Print msg prefixed with a blue [INFO] tag."""
    tag = "[blue][INFO][/blue] "
    console.print(tag + msg)
23
+
24
+
25
def header(title: str) -> None:
    """Draw a horizontal rule with the title rendered in bold."""
    console.rule("[bold]" + title + "[/bold]")
27
+
28
+
29
def make_table(*columns: str, title: str | None = None) -> Table:
    """Build a Rich table with bold headers and the given column names."""
    result = Table(title=title, show_header=True, header_style="bold")
    for column_name in columns:
        result.add_column(column_name)
    return result
@@ -0,0 +1,141 @@
1
+ """Resolution de chemins devflow."""
2
+
3
+ from __future__ import annotations
4
+
5
+ import os
6
+ import re
7
+ import subprocess
8
+ from pathlib import Path
9
+
10
+
11
+ def get_devflow_root() -> Path:
12
+ """Racine du projet devflow (templates, commands, agents, extensions)."""
13
+ if root := os.environ.get("DEVFLOW_ROOT"):
14
+ return Path(root)
15
+ # En dev: src/devflow_cli/utils/paths.py -> remonter 4 niveaux
16
+ return Path(__file__).resolve().parent.parent.parent.parent
17
+
18
+
19
def get_claude_dir() -> Path:
    """Return ~/.claude/ (or CLAUDE_DIR when the env variable is set)."""
    configured = os.environ.get("CLAUDE_DIR")
    if configured is None:
        configured = Path.home() / ".claude"
    return Path(configured)
22
+
23
+
24
+ def get_feature_dir(issue_id: str, project_dir: Path | None = None) -> Path:
25
+ """Deprecated: utiliser resolve_feature_dir() a la place.
26
+
27
+ Repertoire d'une feature: docs/features/<issue_id>/.
28
+ Conserve pour retrocompatibilite uniquement.
29
+ """
30
+ base = project_dir or Path.cwd()
31
+ return base / "docs" / "features" / issue_id
32
+
33
+
34
+ def get_specs_root(project_dir: Path | None = None) -> Path:
35
+ """Repertoire central des artefacts de feature: .specify/specs/."""
36
+ base = project_dir or Path.cwd()
37
+ return base / ".specify" / "specs"
38
+
39
+
40
+ def resolve_feature_dir(
41
+ identifier: str,
42
+ project_dir: Path | None = None,
43
+ ) -> Path | None:
44
+ """Resout le repertoire d'une feature avec fallback triple.
45
+
46
+ Ordre de resolution:
47
+ 1. .specify/specs/<identifier ou match>/
48
+ 2. specs/<identifier ou match>/
49
+ 3. docs/features/<identifier>/
50
+
51
+ Args:
52
+ identifier: Nom de branche, issue ID, ou pattern a chercher.
53
+ project_dir: Racine du projet (defaut: cwd).
54
+
55
+ Returns:
56
+ Path vers le repertoire de la feature, ou None si introuvable.
57
+ """
58
+ base = project_dir or Path.cwd()
59
+
60
+ search_roots = [
61
+ base / ".specify" / "specs",
62
+ base / "specs",
63
+ base / "docs" / "features",
64
+ ]
65
+
66
+ for root in search_roots:
67
+ if not root.is_dir():
68
+ continue
69
+ # Match exact
70
+ exact = root / identifier
71
+ if exact.is_dir():
72
+ return exact
73
+ # Match partiel (ex: identifier "KS-123" dans "005-ks-123")
74
+ for d in root.iterdir():
75
+ if d.is_dir() and identifier.lower() in d.name.lower():
76
+ return d
77
+
78
+ return None
79
+
80
+
81
+ def next_feature_number(project_dir: Path | None = None) -> int:
82
+ """Determine le prochain numero de feature disponible.
83
+
84
+ Scanne .specify/specs/, specs/ et les branches git pour trouver
85
+ le plus grand numero existant, puis retourne le suivant.
86
+ Utilise un verrou fichier (fcntl.flock) pour eviter les race conditions.
87
+ """
88
+ base = project_dir or Path.cwd()
89
+
90
+ def _scan() -> int:
91
+ highest = 0
92
+
93
+ # Scanner les repertoires de specs
94
+ for specs_dir in [base / ".specify" / "specs", base / "specs"]:
95
+ if not specs_dir.is_dir():
96
+ continue
97
+ for d in specs_dir.iterdir():
98
+ if not d.is_dir():
99
+ continue
100
+ m = re.match(r"^(\d+)", d.name)
101
+ if m:
102
+ num = int(m.group(1))
103
+ if num > highest:
104
+ highest = num
105
+
106
+ # Scanner les branches git
107
+ try:
108
+ result = subprocess.run(
109
+ ["git", "branch", "-a"],
110
+ capture_output=True,
111
+ text=True,
112
+ cwd=str(base),
113
+ timeout=5,
114
+ )
115
+ if result.returncode == 0:
116
+ for line in result.stdout.splitlines():
117
+ clean = line.strip().lstrip("* ").split("/")[-1]
118
+ m = re.match(r"^(\d{3})-", clean)
119
+ if m:
120
+ num = int(m.group(1))
121
+ if num > highest:
122
+ highest = num
123
+ except (subprocess.TimeoutExpired, FileNotFoundError):
124
+ pass
125
+
126
+ return highest + 1
127
+
128
+ # Verrouillage fichier (POSIX) pour eviter les race conditions
129
+ try:
130
+ import fcntl
131
+
132
+ lock_dir = base / ".specify" / "specs"
133
+ lock_dir.mkdir(parents=True, exist_ok=True)
134
+ lock_file = lock_dir / ".feature-lock"
135
+
136
+ with open(lock_file, "a+") as f:
137
+ fcntl.flock(f, fcntl.LOCK_EX)
138
+ return _scan()
139
+ except ImportError:
140
+ # Windows : pas de fcntl, on execute sans lock
141
+ return _scan()