scc-cli 1.4.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of scc-cli might be problematic. Click here for more details.

Files changed (113) hide show
  1. scc_cli/__init__.py +15 -0
  2. scc_cli/audit/__init__.py +37 -0
  3. scc_cli/audit/parser.py +191 -0
  4. scc_cli/audit/reader.py +180 -0
  5. scc_cli/auth.py +145 -0
  6. scc_cli/claude_adapter.py +485 -0
  7. scc_cli/cli.py +259 -0
  8. scc_cli/cli_admin.py +706 -0
  9. scc_cli/cli_audit.py +245 -0
  10. scc_cli/cli_common.py +166 -0
  11. scc_cli/cli_config.py +527 -0
  12. scc_cli/cli_exceptions.py +705 -0
  13. scc_cli/cli_helpers.py +244 -0
  14. scc_cli/cli_init.py +272 -0
  15. scc_cli/cli_launch.py +1454 -0
  16. scc_cli/cli_org.py +1428 -0
  17. scc_cli/cli_support.py +322 -0
  18. scc_cli/cli_team.py +892 -0
  19. scc_cli/cli_worktree.py +865 -0
  20. scc_cli/config.py +583 -0
  21. scc_cli/console.py +562 -0
  22. scc_cli/constants.py +79 -0
  23. scc_cli/contexts.py +377 -0
  24. scc_cli/deprecation.py +54 -0
  25. scc_cli/deps.py +189 -0
  26. scc_cli/docker/__init__.py +127 -0
  27. scc_cli/docker/core.py +466 -0
  28. scc_cli/docker/credentials.py +726 -0
  29. scc_cli/docker/launch.py +604 -0
  30. scc_cli/doctor/__init__.py +99 -0
  31. scc_cli/doctor/checks.py +1074 -0
  32. scc_cli/doctor/render.py +346 -0
  33. scc_cli/doctor/types.py +66 -0
  34. scc_cli/errors.py +288 -0
  35. scc_cli/evaluation/__init__.py +27 -0
  36. scc_cli/evaluation/apply_exceptions.py +207 -0
  37. scc_cli/evaluation/evaluate.py +97 -0
  38. scc_cli/evaluation/models.py +80 -0
  39. scc_cli/exit_codes.py +55 -0
  40. scc_cli/git.py +1521 -0
  41. scc_cli/json_command.py +166 -0
  42. scc_cli/json_output.py +96 -0
  43. scc_cli/kinds.py +62 -0
  44. scc_cli/marketplace/__init__.py +123 -0
  45. scc_cli/marketplace/adapter.py +74 -0
  46. scc_cli/marketplace/compute.py +377 -0
  47. scc_cli/marketplace/constants.py +87 -0
  48. scc_cli/marketplace/managed.py +135 -0
  49. scc_cli/marketplace/materialize.py +723 -0
  50. scc_cli/marketplace/normalize.py +548 -0
  51. scc_cli/marketplace/render.py +257 -0
  52. scc_cli/marketplace/resolve.py +459 -0
  53. scc_cli/marketplace/schema.py +506 -0
  54. scc_cli/marketplace/sync.py +260 -0
  55. scc_cli/marketplace/team_cache.py +195 -0
  56. scc_cli/marketplace/team_fetch.py +688 -0
  57. scc_cli/marketplace/trust.py +244 -0
  58. scc_cli/models/__init__.py +41 -0
  59. scc_cli/models/exceptions.py +273 -0
  60. scc_cli/models/plugin_audit.py +434 -0
  61. scc_cli/org_templates.py +269 -0
  62. scc_cli/output_mode.py +167 -0
  63. scc_cli/panels.py +113 -0
  64. scc_cli/platform.py +350 -0
  65. scc_cli/profiles.py +960 -0
  66. scc_cli/remote.py +443 -0
  67. scc_cli/schemas/__init__.py +1 -0
  68. scc_cli/schemas/org-v1.schema.json +456 -0
  69. scc_cli/schemas/team-config.v1.schema.json +163 -0
  70. scc_cli/sessions.py +425 -0
  71. scc_cli/setup.py +588 -0
  72. scc_cli/source_resolver.py +470 -0
  73. scc_cli/stats.py +378 -0
  74. scc_cli/stores/__init__.py +13 -0
  75. scc_cli/stores/exception_store.py +251 -0
  76. scc_cli/subprocess_utils.py +88 -0
  77. scc_cli/teams.py +382 -0
  78. scc_cli/templates/__init__.py +2 -0
  79. scc_cli/templates/org/__init__.py +0 -0
  80. scc_cli/templates/org/minimal.json +19 -0
  81. scc_cli/templates/org/reference.json +74 -0
  82. scc_cli/templates/org/strict.json +38 -0
  83. scc_cli/templates/org/teams.json +42 -0
  84. scc_cli/templates/statusline.sh +75 -0
  85. scc_cli/theme.py +348 -0
  86. scc_cli/ui/__init__.py +124 -0
  87. scc_cli/ui/branding.py +68 -0
  88. scc_cli/ui/chrome.py +395 -0
  89. scc_cli/ui/dashboard/__init__.py +62 -0
  90. scc_cli/ui/dashboard/_dashboard.py +677 -0
  91. scc_cli/ui/dashboard/loaders.py +395 -0
  92. scc_cli/ui/dashboard/models.py +184 -0
  93. scc_cli/ui/dashboard/orchestrator.py +390 -0
  94. scc_cli/ui/formatters.py +443 -0
  95. scc_cli/ui/gate.py +350 -0
  96. scc_cli/ui/help.py +157 -0
  97. scc_cli/ui/keys.py +538 -0
  98. scc_cli/ui/list_screen.py +431 -0
  99. scc_cli/ui/picker.py +700 -0
  100. scc_cli/ui/prompts.py +200 -0
  101. scc_cli/ui/wizard.py +675 -0
  102. scc_cli/update.py +680 -0
  103. scc_cli/utils/__init__.py +39 -0
  104. scc_cli/utils/fixit.py +264 -0
  105. scc_cli/utils/fuzzy.py +124 -0
  106. scc_cli/utils/locks.py +101 -0
  107. scc_cli/utils/ttl.py +376 -0
  108. scc_cli/validate.py +455 -0
  109. scc_cli-1.4.1.dist-info/METADATA +369 -0
  110. scc_cli-1.4.1.dist-info/RECORD +113 -0
  111. scc_cli-1.4.1.dist-info/WHEEL +4 -0
  112. scc_cli-1.4.1.dist-info/entry_points.txt +2 -0
  113. scc_cli-1.4.1.dist-info/licenses/LICENSE +21 -0
scc_cli/contexts.py ADDED
@@ -0,0 +1,377 @@
1
+ """Work context tracking for multi-team, multi-project workflows.
2
+
3
+ A WorkContext represents the developer's "working unit": team + repo + worktree.
4
+ This module tracks recent contexts to enable quick switching between projects
5
+ without requiring multiple manual steps (team switch → worktree → session).
6
+
7
+ The contexts are stored in ~/.cache/scc/contexts.json with a versioned schema:
8
+ {"version": 1, "contexts": [...]}
9
+
10
+ Writes are atomic (temp file + rename) for safety.
11
+
12
+ Note: Concurrent writes use "last writer wins" semantics. For most CLI usage
13
+ patterns, this is fine since operations are user-initiated and sequential.
14
+
15
+ Example usage:
16
+ # Record a context when starting work
17
+ ctx = WorkContext(
18
+ team="platform",
19
+ repo_root=Path("/code/api-service"),
20
+ worktree_path=Path("/code/api-service"),
21
+ worktree_name="main",
22
+ )
23
+ record_context(ctx)
24
+
25
+ # Get recent contexts for display
26
+ recent = load_recent_contexts(limit=10)
27
+ """
28
+
29
+ from __future__ import annotations
30
+
31
+ import json
32
+ import os
33
+ import tempfile
34
+ from dataclasses import dataclass, field
35
+ from datetime import datetime, timezone
36
+ from pathlib import Path
37
+ from typing import Any
38
+
39
+ from .utils.locks import file_lock, lock_path
40
+
41
# Schema version for future migration support (bump when the on-disk
# layout of contexts.json changes incompatibly).
SCHEMA_VERSION = 1

# Maximum number of contexts to keep in history; pinned contexts are
# retained even when this cap is exceeded (see record_context).
MAX_CONTEXTS = 30
46
+
47
+
48
+ def _parse_dt(s: str) -> datetime:
49
+ """Parse ISO datetime string, with fallback for malformed values."""
50
+ try:
51
+ # Handle Z suffix and standard ISO format
52
+ return datetime.fromisoformat(s.replace("Z", "+00:00"))
53
+ except (ValueError, TypeError):
54
+ return datetime.fromtimestamp(0, tz=timezone.utc)
55
+
56
+
57
def normalize_path(p: str | Path) -> Path:
    """Return *p* expanded and resolved for stable comparisons.

    ``resolve(strict=False)`` follows symlinks while tolerating paths that
    do not exist yet. If resolution itself fails at the OS level, degrade
    to ``absolute()``, which does not touch the filesystem.
    """
    expanded = Path(p).expanduser()
    try:
        result = expanded.resolve(strict=False)
    except OSError:
        result = expanded.absolute()
    return result
69
+
70
+
71
@dataclass
class WorkContext:
    """A developer's working context: the (team, repo, worktree) triple.

    Rather than tracking "sessions" and "workspaces" separately, SCC treats
    this triple as the unit of work switching.

    Attributes:
        team: The team profile name (e.g., "platform", "data"), or None for standalone mode.
        repo_root: Absolute path to the repository root.
        worktree_path: Absolute path to the worktree (may equal repo_root for main).
        worktree_name: Directory name of the worktree (stable identifier).
        branch: Git branch name at time of last use (metadata, can change).
        last_session_id: Optional session ID from last work in this context.
        last_used: When this context was last used (ISO format string).
        pinned: Whether this context is pinned to the top of the list.

    Note:
        worktree_name is the stable directory name, while branch is mutable
        metadata. Display prefers branch (when known) with worktree_name as
        the fallback, so records do not become "lost" when a user switches
        branches inside the same worktree.
    """

    team: str | None
    repo_root: Path
    worktree_path: Path
    worktree_name: str
    branch: str | None = None
    last_session_id: str | None = None
    last_used: str = field(default_factory=lambda: datetime.now(timezone.utc).isoformat())
    pinned: bool = False

    @property
    def repo_name(self) -> str:
        """Repository name, derived from the final path component."""
        return self.repo_root.name

    @property
    def team_label(self) -> str:
        """Team name for display, with 'standalone' when no team is set."""
        return self.team or "standalone"

    @property
    def display_label(self) -> str:
        """One-line label for lists: 'team · repo · branch/worktree'.

        Prefers the branch name (meaningful) and falls back to the worktree
        directory name (stable across branch switches).
        """
        tail = self.branch or self.worktree_name
        return f"{self.team_label} · {self.repo_name} · {tail}"

    @property
    def unique_key(self) -> tuple[str | None, Path, Path]:
        """Deduplication identity: (team, repo_root, worktree_path)."""
        return (self.team, self.repo_root, self.worktree_path)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a JSON-compatible dictionary (paths as strings)."""
        return dict(
            team=self.team,
            repo_root=str(self.repo_root),
            worktree_path=str(self.worktree_path),
            worktree_name=self.worktree_name,
            branch=self.branch,
            last_session_id=self.last_session_id,
            last_used=self.last_used,
            pinned=self.pinned,
        )

    @classmethod
    def from_dict(cls, data: dict[str, Any]) -> WorkContext:
        """Deserialize from a dictionary produced by to_dict().

        Optional fields default sensibly, which keeps records written by
        older versions (e.g. without a branch field) loadable.
        """
        fallback_ts = datetime.now(timezone.utc).isoformat()
        return cls(
            team=data["team"],
            repo_root=normalize_path(data["repo_root"]),
            worktree_path=normalize_path(data["worktree_path"]),
            worktree_name=data["worktree_name"],
            branch=data.get("branch"),
            last_session_id=data.get("last_session_id"),
            last_used=data.get("last_used", fallback_ts),
            pinned=data.get("pinned", False),
        )
160
+
161
+
162
+ def _get_contexts_path() -> Path:
163
+ """Get path to contexts cache file."""
164
+ cache_dir = Path(os.environ.get("XDG_CACHE_HOME", Path.home() / ".cache")) / "scc"
165
+ cache_dir.mkdir(parents=True, exist_ok=True)
166
+ return cache_dir / "contexts.json"
167
+
168
+
169
def _load_contexts_raw() -> list[dict[str, Any]]:
    """Read raw context records from disk, tolerating missing/bad data.

    Accepts both the versioned schema ({"version": ..., "contexts": [...]})
    and the legacy bare-list layout; anything unreadable yields [].
    """
    path = _get_contexts_path()
    if not path.exists():
        return []
    try:
        raw = json.loads(path.read_text(encoding="utf-8"))
    except (json.JSONDecodeError, OSError):
        return []

    # Versioned schema: unwrap the "contexts" list if it is well-formed.
    if isinstance(raw, dict) and "contexts" in raw:
        entries = raw["contexts"]
        return entries if isinstance(entries, list) else []

    # Legacy layout: a bare list (migrated to versioned form on next write).
    if isinstance(raw, list):
        return raw

    return []
189
+
190
+
191
def _save_contexts_raw(contexts: list[dict[str, Any]]) -> None:
    """Persist context records atomically (write temp file, then rename)."""
    target = _get_contexts_path()
    target.parent.mkdir(parents=True, exist_ok=True)

    payload = {"version": SCHEMA_VERSION, "contexts": contexts}

    # os.replace of a same-directory temp file gives an atomic swap, so
    # readers never observe a partially written file.
    fd, tmp_name = tempfile.mkstemp(dir=target.parent, suffix=".tmp")
    try:
        with os.fdopen(fd, "w", encoding="utf-8") as handle:
            json.dump(payload, handle, indent=2)
        os.replace(tmp_name, target)
    except Exception:
        # Don't leave a stray temp file behind on failure.
        if os.path.exists(tmp_name):
            os.unlink(tmp_name)
        raise
210
+
211
+
212
def load_recent_contexts(limit: int = 10) -> list[WorkContext]:
    """Load recent contexts, pinned entries first, then by recency.

    Args:
        limit: Maximum number of contexts to return.

    Returns:
        List of WorkContext objects: pinned ones first, each group ordered
        by last_used descending.
    """
    records = [WorkContext.from_dict(entry) for entry in _load_contexts_raw()]
    # With reverse=True, pinned=True sorts before False and newer
    # timestamps sort before older ones.
    ordered = sorted(
        records,
        key=lambda c: (c.pinned, _parse_dt(c.last_used)),
        reverse=True,
    )
    return ordered[:limit]
229
+
230
+
231
def _merge_contexts(existing: WorkContext, incoming: WorkContext) -> WorkContext:
    """Combine an incoming update with an existing record.

    The incoming context wins for identity fields; branch and session id
    fall back to the existing values when the update omits them. The pin
    flag always survives from the existing record, and last_used is stamped
    with the current time.
    """
    merged_fields = dict(
        team=incoming.team,
        repo_root=incoming.repo_root,
        worktree_path=incoming.worktree_path,
        worktree_name=incoming.worktree_name,
        branch=incoming.branch or existing.branch,
        last_session_id=incoming.last_session_id or existing.last_session_id,
        last_used=datetime.now(timezone.utc).isoformat(),
        pinned=existing.pinned,
    )
    return WorkContext(**merged_fields)
246
+
247
+
248
def record_context(context: WorkContext) -> None:
    """Insert or refresh a context record under the contexts file lock.

    Identity is the (team, repo_root, worktree_path) triple; an existing
    record with the same key is merged (its pin flag preserved), otherwise
    a normalized copy is appended. The history is then ordered by recency
    and trimmed to MAX_CONTEXTS, with pinned entries always retained.

    Note: This function does not mutate the input context.

    Args:
        context: The context to record.
    """
    with file_lock(lock_path("contexts")):
        known = [WorkContext.from_dict(entry) for entry in _load_contexts_raw()]

        # Build a normalized copy so the caller's object stays untouched.
        fresh = WorkContext(
            team=context.team,
            repo_root=normalize_path(context.repo_root),
            worktree_path=normalize_path(context.worktree_path),
            worktree_name=context.worktree_name,
            branch=context.branch,  # kept for Quick Resume display
            last_session_id=context.last_session_id,
            last_used=datetime.now(timezone.utc).isoformat(),
            pinned=context.pinned,
        )

        # Merge into the record with the same identity, or append.
        for index, candidate in enumerate(known):
            if candidate.unique_key == fresh.unique_key:
                known[index] = _merge_contexts(candidate, fresh)
                break
        else:
            known.append(fresh)

        # Pinned records survive trimming even when old; both groups are
        # sorted by recency for a consistent on-disk order.
        pinned_items = sorted(
            (c for c in known if c.pinned),
            key=lambda c: _parse_dt(c.last_used),
            reverse=True,
        )
        room = max(0, MAX_CONTEXTS - len(pinned_items))
        recent_unpinned = sorted(
            (c for c in known if not c.pinned),
            key=lambda c: _parse_dt(c.last_used),
            reverse=True,
        )[:room]

        _save_contexts_raw([c.to_dict() for c in pinned_items + recent_unpinned])
305
+
306
+
307
def toggle_pin(team: str, repo_root: str | Path, worktree_path: str | Path) -> bool | None:
    """Flip the pinned flag on the context matching the given identity.

    Args:
        team: Team name.
        repo_root: Repository root path.
        worktree_path: Worktree path.

    Returns:
        The new pinned state (True if now pinned, False if unpinned), or
        None when no matching context exists.
    """
    with file_lock(lock_path("contexts")):
        records = [WorkContext.from_dict(entry) for entry in _load_contexts_raw()]
        wanted = (team, normalize_path(repo_root), normalize_path(worktree_path))

        for position, record in enumerate(records):
            if record.unique_key != wanted:
                continue
            # Replace the record with a copy whose pin flag is inverted;
            # all other fields (including last_used) are carried over.
            flipped = WorkContext(
                team=record.team,
                repo_root=record.repo_root,
                worktree_path=record.worktree_path,
                worktree_name=record.worktree_name,
                branch=record.branch,
                last_session_id=record.last_session_id,
                last_used=record.last_used,
                pinned=not record.pinned,
            )
            records[position] = flipped
            _save_contexts_raw([c.to_dict() for c in records])
            return flipped.pinned

        return None
342
+
343
+
344
def clear_contexts() -> int:
    """Remove every stored context from the cache.

    Returns:
        Number of contexts cleared.
    """
    with file_lock(lock_path("contexts")):
        removed = len(_load_contexts_raw())
        _save_contexts_raw([])
        return removed
356
+
357
+
358
def get_context_for_path(worktree_path: str | Path, team: str | None = None) -> WorkContext | None:
    """Look up the recorded context for a worktree path.

    Paths are normalized before comparison for robustness. When *team* is
    given, only a context belonging to that team matches.

    Args:
        worktree_path: The worktree path to search for.
        team: Optional team filter.

    Returns:
        Matching context or None.
    """
    target = normalize_path(worktree_path)
    candidates = load_recent_contexts(limit=MAX_CONTEXTS)
    return next(
        (
            ctx
            for ctx in candidates
            if ctx.worktree_path == target and (team is None or ctx.team == team)
        ),
        None,
    )
scc_cli/deprecation.py ADDED
@@ -0,0 +1,54 @@
1
+ """Provide deprecation warning infrastructure.
2
+
3
+ Provide consistent deprecation warnings that respect output modes.
4
+ Suppress warnings in JSON mode to maintain clean machine output.
5
+
6
+ Usage:
7
+ from scc_cli.deprecation import warn_deprecated
8
+
9
+ # In command handler:
10
+ warn_deprecated("old-cmd", "new-cmd", remove_version="2.0")
11
+ """
12
+
13
+ import os
14
+
15
+ from rich.console import Console
16
+
17
+ from .output_mode import is_json_mode
18
+
19
# Stderr console for deprecation warnings — kept off stdout so
# machine-readable output (e.g. JSON mode) stays clean.
_stderr_console = Console(stderr=True)
21
+
22
+ # ═══════════════════════════════════════════════════════════════════════════════
23
+ # Deprecation Warnings
24
+ # ═══════════════════════════════════════════════════════════════════════════════
25
+
26
+
27
def warn_deprecated(
    old_cmd: str,
    new_cmd: str,
    remove_version: str = "2.0",
) -> None:
    """Emit a deprecation notice on stderr.

    The notice is skipped entirely when:
    - JSON output mode is active (keeps machine output clean)
    - the SCC_NO_DEPRECATION_WARN=1 environment variable is set

    Args:
        old_cmd: The deprecated command/option name
        new_cmd: The replacement command/option name
        remove_version: The version when old_cmd will be removed
    """
    opted_out = os.environ.get("SCC_NO_DEPRECATION_WARN") == "1"
    if is_json_mode() or opted_out:
        return

    notice = (
        f"[yellow]DEPRECATION:[/yellow] '{old_cmd}' is deprecated. "
        f"Use '{new_cmd}' instead. Will be removed in v{remove_version}."
    )
    _stderr_console.print(notice)
scc_cli/deps.py ADDED
@@ -0,0 +1,189 @@
1
+ """
2
+ Provide dependency detection and installation for project workspaces.
3
+
4
+ Offer opt-in dependency installation that:
5
+ - Is opt-in (--install-deps flag)
6
+ - Never blocks scc start by default
7
+ - Supports strict mode for CI/automation that needs hard failures
8
+
9
+ Supported package managers:
10
+ - JavaScript: npm, pnpm, yarn, bun
11
+ - Python: poetry, uv, pip
12
+ - Java: maven, gradle
13
+ """
14
+
15
+ import subprocess
16
+ from pathlib import Path
17
+
18
+ # ═══════════════════════════════════════════════════════════════════════════════
19
+ # Exception Classes
20
+ # ═══════════════════════════════════════════════════════════════════════════════
21
+
22
+
23
class DependencyInstallError(Exception):
    """Raised when dependency installation fails in strict mode."""

    def __init__(self, package_manager: str, message: str):
        # Keep the parts accessible for structured error reporting.
        self.package_manager = package_manager
        self.message = message
        combined = f"{package_manager}: {message}"
        super().__init__(combined)
30
+
31
+
32
+ # ═══════════════════════════════════════════════════════════════════════════════
33
+ # Package Manager Detection
34
+ # ═══════════════════════════════════════════════════════════════════════════════
35
+
36
# Detection order matters: lock files pin an exact tool and therefore take
# priority over generic manifest files, which act as fallbacks.
DETECTION_ORDER = [
    # JavaScript lock files (priority)
    ("pnpm-lock.yaml", "pnpm"),
    ("yarn.lock", "yarn"),
    ("bun.lockb", "bun"),
    ("package-lock.json", "npm"),
    # Python lock files (priority)
    ("uv.lock", "uv"),
    ("poetry.lock", "poetry"),
    # Java build files
    ("pom.xml", "maven"),
    ("build.gradle.kts", "gradle"),
    ("build.gradle", "gradle"),
    # Fallback manifest files
    ("package.json", "npm"),  # JS fallback
    ("pyproject.toml", "pip"),  # Python fallback
    ("requirements.txt", "pip"),
]


def detect_package_manager(workspace: Path) -> str | None:
    """Identify the package manager used by a project workspace.

    Walks DETECTION_ORDER (lock files before manifests) and returns the
    manager for the first marker file found directly in *workspace*.

    Args:
        workspace: Path to the project workspace

    Returns:
        Package manager name or None if not detected.
        Possible values: 'npm', 'pnpm', 'yarn', 'bun', 'poetry', 'uv',
        'pip', 'maven', 'gradle'
    """
    return next(
        (
            manager
            for marker, manager in DETECTION_ORDER
            if (workspace / marker).exists()
        ),
        None,
    )
75
+
76
+
77
+ # ═══════════════════════════════════════════════════════════════════════════════
78
+ # Install Commands
79
+ # ═══════════════════════════════════════════════════════════════════════════════
80
+
81
# Canonical install invocation per package manager.
INSTALL_COMMANDS = {
    # JavaScript
    "npm": ["npm", "install"],
    "pnpm": ["pnpm", "install"],
    "yarn": ["yarn", "install"],
    "bun": ["bun", "install"],
    # Python
    "poetry": ["poetry", "install"],
    "uv": ["uv", "sync"],
    "pip": ["pip", "install", "-r", "requirements.txt"],
    # Java
    "maven": ["mvn", "install", "-DskipTests"],
    "gradle": ["gradle", "dependencies"],
}


def get_install_command(package_manager: str) -> list[str] | None:
    """Look up the install invocation for a package manager.

    Args:
        package_manager: Name of the package manager

    Returns:
        List of command arguments or None if unknown
    """
    try:
        return INSTALL_COMMANDS[package_manager]
    except KeyError:
        return None
107
+
108
+
109
+ # ═══════════════════════════════════════════════════════════════════════════════
110
+ # Dependency Installation
111
+ # ═══════════════════════════════════════════════════════════════════════════════
112
+
113
+
114
def install_dependencies(
    workspace: Path,
    package_manager: str,
    strict: bool = False,
) -> bool:
    """Run the dependency installation command for *package_manager*.

    Args:
        workspace: Path to project workspace
        package_manager: Detected package manager name
        strict: If True, raise on failure. If False (default), warn and continue.

    Returns:
        True if install succeeded, False if failed (only when strict=False)

    Raises:
        DependencyInstallError: If strict=True and installation fails
    """
    cmd = get_install_command(package_manager)
    if cmd is None:
        if strict:
            raise DependencyInstallError(package_manager, "Unknown package manager")
        return False

    # Keep the try body minimal: only subprocess.run can raise here.
    try:
        result = subprocess.run(
            cmd,
            cwd=workspace,
            capture_output=True,
            text=True,
            check=False,  # return code is inspected explicitly below
        )
    except FileNotFoundError as exc:
        # The package manager binary itself is missing from PATH.
        if strict:
            # Chain the original exception so the root cause is preserved
            # in the traceback (previously the cause was dropped).
            raise DependencyInstallError(
                package_manager,
                f"'{cmd[0]}' not found. Is {package_manager} installed?",
            ) from exc
        return False

    if result.returncode == 0:
        return True

    # Non-zero exit: surface whatever output the tool produced.
    error_msg = result.stderr or result.stdout or "Unknown error"
    if strict:
        raise DependencyInstallError(package_manager, f"Command failed: {error_msg}")
    return False
165
+
166
+
167
def auto_install_dependencies(workspace: Path, strict: bool = False) -> bool:
    """Detect the workspace's package manager and install its dependencies.

    Convenience wrapper combining detect_package_manager() and
    install_dependencies().

    Args:
        workspace: Path to project workspace
        strict: If True, raise on failure. If False (default), warn and continue.

    Returns:
        True if install succeeded, False if failed or no package manager detected

    Raises:
        DependencyInstallError: If strict=True and installation fails
    """
    manager = detect_package_manager(workspace)
    if manager is not None:
        return install_dependencies(workspace, manager, strict=strict)
    if strict:
        raise DependencyInstallError("unknown", "No package manager detected")
    return False