devvy 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- cli/__init__.py +1 -0
- cli/__main__.py +9 -0
- cli/ado_client.py +212 -0
- cli/commands/__init__.py +1 -0
- cli/commands/init.py +107 -0
- cli/commands/logs.py +36 -0
- cli/commands/ps.py +72 -0
- cli/commands/resume.py +105 -0
- cli/commands/run.py +135 -0
- cli/commands/status.py +35 -0
- cli/config.py +69 -0
- cli/db.py +138 -0
- cli/fsm.py +87 -0
- cli/local_runner/__init__.py +85 -0
- cli/local_runner/credentials.py +101 -0
- cli/local_runner/docker_primitives.py +142 -0
- cli/local_runner/repo_detection.py +182 -0
- cli/local_runner/validation.py +203 -0
- cli/local_runner/workspace.py +295 -0
- cli/main.py +22 -0
- cli/models.py +83 -0
- cli/orchestrator.py +530 -0
- cli/prompts.py +42 -0
- cli/ui/__init__.py +55 -0
- cli/ui/picker.py +179 -0
- cli/ui/rendering.py +350 -0
- devvy-0.1.0.dist-info/METADATA +260 -0
- devvy-0.1.0.dist-info/RECORD +31 -0
- devvy-0.1.0.dist-info/WHEEL +4 -0
- devvy-0.1.0.dist-info/entry_points.txt +2 -0
- devvy-0.1.0.dist-info/licenses/LICENSE +21 -0
cli/commands/status.py
ADDED
|
@@ -0,0 +1,35 @@
|
|
|
1
|
+
"""coding-agent status <id> — show current state of a ticket from local DB."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import asyncio
|
|
6
|
+
|
|
7
|
+
import typer
|
|
8
|
+
from sqlalchemy import select
|
|
9
|
+
|
|
10
|
+
import cli.models # noqa: F401 — ensure tables are registered on Base.metadata
|
|
11
|
+
from cli.db import SessionManager, init_db
|
|
12
|
+
from cli.models import GraphContext, Ticket
|
|
13
|
+
from cli.ui import err_console, print_status_panel
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
def status(ticket_id: str = typer.Argument(..., help="Ticket ID")) -> None:
    """Print the current state and metadata of a ticket."""
    # Typer commands are synchronous entry points; bridge into the async
    # DB layer with a fresh event loop per invocation.
    asyncio.run(_status_async(ticket_id))
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
async def _status_async(ticket_id: str) -> None:
    """Load the ticket plus its graph context and render the status panel.

    Raises typer.Exit(1) when no ticket with *ticket_id* exists.
    """
    await init_db()

    async with SessionManager.session() as session:
        ticket = await session.get(Ticket, ticket_id)
        if ticket is None:
            err_console.print(f"[red]Ticket {ticket_id!r} not found.[/red]")
            raise typer.Exit(1)

        # A ticket has at most one GraphContext row; None is handled by the panel.
        query = select(GraphContext).where(GraphContext.ticket_id == ticket_id)
        ctx = (await session.execute(query)).scalar_one_or_none()

        print_status_panel(ticket, ctx)
|
cli/config.py
ADDED
|
@@ -0,0 +1,69 @@
|
|
|
1
|
+
"""CLI configuration — loading and typed representation.
|
|
2
|
+
|
|
3
|
+
Config is read from ~/.coding-agent.toml under the [coding_agent] section.
|
|
4
|
+
``load_config()`` is the single entry point used by all commands; it raises
|
|
5
|
+
``ConfigNotFoundError`` if the file doesn't exist (i.e. ``devvy init`` hasn't
|
|
6
|
+
been run yet).
|
|
7
|
+
"""
|
|
8
|
+
|
|
9
|
+
from __future__ import annotations
|
|
10
|
+
|
|
11
|
+
import tomllib
|
|
12
|
+
from dataclasses import dataclass
|
|
13
|
+
from pathlib import Path
|
|
14
|
+
|
|
15
|
+
# User-level config file read by load_config() and written by `devvy init`.
CONFIG_PATH = Path.home() / ".coding-agent.toml"
# Model used when the config file does not set `opencode_model`.
DEFAULT_MODEL = "github-copilot/claude-sonnet-4.6"
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
class ConfigNotFoundError(Exception):
    """Raised when the configuration file cannot be found.

    Thrown by load_config() when CONFIG_PATH does not exist, i.e. before
    `devvy init` has been run.
    """
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
@dataclass(frozen=True)
class Config:
    """Typed, validated representation of ~/.coding-agent.toml [coding_agent]."""

    opencode_model: str
    ado_org_url: str
    ado_project: str
    ado_pat: str
    repo_url: str
    env_file: str | None  # path to the .env file to copy into every cloned workspace

    def __post_init__(self) -> None:
        # The three Azure DevOps fields are mandatory; everything else may be
        # empty. Collect every blank one so the error names them all at once.
        required = {
            "ado_org_url": self.ado_org_url,
            "ado_project": self.ado_project,
            "ado_pat": self.ado_pat,
        }
        missing = [name for name, value in required.items() if not value]
        if missing:
            raise ValueError(
                f"Missing required config field(s): {', '.join(missing)}. "
                "Run `devvy init` to configure."
            )
|
|
49
|
+
|
|
50
|
+
|
|
51
|
+
def load_config() -> Config:
    """Load config from ~/.coding-agent.toml. Raises ConfigNotFoundError if missing."""
    if not CONFIG_PATH.exists():
        raise ConfigNotFoundError(
            f"No configuration found at {CONFIG_PATH}. Run `devvy init` first."
        )

    with CONFIG_PATH.open("rb") as fh:
        raw = tomllib.load(fh)

    # All settings live under the [coding_agent] table; absent keys fall back
    # to empty strings (validated by Config.__post_init__) or DEFAULT_MODEL.
    get = raw.get("coding_agent", {}).get
    return Config(
        opencode_model=get("opencode_model", DEFAULT_MODEL),
        ado_org_url=get("ado_org_url", ""),
        ado_project=get("ado_project", ""),
        ado_pat=get("ado_pat", ""),
        repo_url=get("repo_url", ""),
        env_file=get("env_file") or None,
    )
|
cli/db.py
ADDED
|
@@ -0,0 +1,138 @@
|
|
|
1
|
+
"""Local SQLite database session manager for the CLI.
|
|
2
|
+
|
|
3
|
+
Uses SQLAlchemy async engine backed by aiosqlite. The database file lives at
|
|
4
|
+
~/.devvy/devvy.db and is created automatically on first use.
|
|
5
|
+
|
|
6
|
+
Usage:
|
|
7
|
+
async with db.session() as session:
|
|
8
|
+
session.add(obj)
|
|
9
|
+
# auto-committed and closed on exit
|
|
10
|
+
"""
|
|
11
|
+
|
|
12
|
+
from __future__ import annotations
|
|
13
|
+
|
|
14
|
+
from contextlib import asynccontextmanager
|
|
15
|
+
from pathlib import Path
|
|
16
|
+
from typing import AsyncGenerator
|
|
17
|
+
|
|
18
|
+
from sqlalchemy import text
|
|
19
|
+
from sqlalchemy.exc import OperationalError
|
|
20
|
+
from sqlalchemy.ext.asyncio import (
|
|
21
|
+
AsyncSession,
|
|
22
|
+
async_sessionmaker,
|
|
23
|
+
create_async_engine,
|
|
24
|
+
)
|
|
25
|
+
from sqlalchemy.orm import DeclarativeBase
|
|
26
|
+
|
|
27
|
+
# Directory holding all devvy state; created on demand in _SessionManager.init().
DB_DIR = Path.home() / ".devvy"
# Default SQLite database file used by init_db() / SessionManager.
DB_PATH = DB_DIR / "devvy.db"
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
class Base(DeclarativeBase):
    # Shared declarative base: importing cli.models registers all ORM tables
    # on Base.metadata, which create_tables() then materialises.
    pass
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
class _SessionManager:
    """Thin async session manager around a SQLite engine."""

    def __init__(self) -> None:
        # Engine and session factory are created lazily by init(); both stay
        # None until then so misuse raises a clear RuntimeError below.
        self._engine = None
        self._session_maker: async_sessionmaker[AsyncSession] | None = None

    def init(self, db_path: Path = DB_PATH) -> None:
        """Initialise the engine. Call once before first use (e.g. in CLI startup).

        Safe to call more than once — disposes the previous engine first.
        """
        if self._engine is not None:
            # Schedule the old engine for disposal. We can't await here (sync
            # method), so we use the sync dispose variant which is fine for the
            # SQLite + aiosqlite driver (it doesn't hold open connections between
            # requests).
            import asyncio as _asyncio

            try:
                loop = _asyncio.get_event_loop()
                if loop.is_running():
                    # Fire-and-forget: disposal finishes on the running loop.
                    loop.create_task(self._engine.dispose())
                else:
                    loop.run_until_complete(self._engine.dispose())
            except RuntimeError:
                pass  # No event loop — engine will be GC'd harmlessly.
        # NOTE(review): only DB_DIR is created here; a caller passing a custom
        # db_path outside DB_DIR must ensure its parent directory exists.
        DB_DIR.mkdir(parents=True, exist_ok=True)
        url = f"sqlite+aiosqlite:///{db_path}"
        self._engine = create_async_engine(url, echo=False)
        self._session_maker = async_sessionmaker(
            bind=self._engine,
            autoflush=False,
            # Keep loaded attributes readable after commit (no refresh needed).
            expire_on_commit=False,
        )

    async def close(self) -> None:
        """Dispose the engine and release all connections."""
        if self._engine is not None:
            await self._engine.dispose()
        self._engine = None
        self._session_maker = None

    async def create_tables(self) -> None:
        """Create all tables defined on Base if they don't exist."""
        if self._engine is None:
            raise RuntimeError("SessionManager not initialised — call .init() first")
        async with self._engine.begin() as conn:
            await conn.run_sync(Base.metadata.create_all)

    @asynccontextmanager
    async def session(self) -> AsyncGenerator[AsyncSession, None]:
        """Yield a session that auto-commits on clean exit and rolls back on error."""
        if self._session_maker is None:
            raise RuntimeError("SessionManager not initialised — call .init() first")
        async with self._session_maker() as sess:
            try:
                yield sess
                await sess.commit()
            except Exception:
                await sess.rollback()
                raise
            finally:
                await sess.close()


# Module-level singleton shared by all CLI commands.
SessionManager = _SessionManager()
|
|
102
|
+
|
|
103
|
+
|
|
104
|
+
async def init_db(db_path: Path = DB_PATH) -> None:
    """Initialise the database engine and create all tables.

    Convenience wrapper used by every CLI command so they don't need to
    repeat the ``SessionManager.init() / await SessionManager.create_tables()``
    boilerplate.

    Args:
        db_path: SQLite file to open; defaults to ~/.devvy/devvy.db.
    """
    SessionManager.init(db_path)
    await SessionManager.create_tables()
    # Bring databases created by older versions up to the current schema.
    await _maybe_migrate()
|
|
114
|
+
|
|
115
|
+
|
|
116
|
+
async def _maybe_migrate() -> None:
    """Apply lightweight additive migrations that are safe to run repeatedly.

    SQLite raises ``OperationalError`` with "duplicate column name" when a
    column already exists; only that specific error is swallowed so the
    migration stays idempotent. Any other operational failure (locked
    database, missing table, SQL syntax error) is re-raised instead of being
    silently ignored.
    """
    engine = SessionManager._engine  # type: ignore[attr-defined]
    if engine is None:
        return

    _new_columns = [
        # (table_name, column_definition)
        ("graph_contexts", "worker_pid INTEGER"),
    ]

    async with engine.begin() as conn:
        for table, col_def in _new_columns:
            try:
                await conn.execute(text(f"ALTER TABLE {table} ADD COLUMN {col_def}"))
            except OperationalError as exc:
                # Only the expected "column already exists" case is benign;
                # anything else must surface rather than corrupt later runs.
                if "duplicate column name" not in str(exc).lower():
                    raise
|
cli/fsm.py
ADDED
|
@@ -0,0 +1,87 @@
|
|
|
1
|
+
"""State machine for the autonomous coding agent.
|
|
2
|
+
|
|
3
|
+
Defines all valid states and the legal transition graph.
|
|
4
|
+
All state transitions must go through ``transition()`` to enforce correctness.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
from __future__ import annotations
|
|
8
|
+
|
|
9
|
+
from enum import Enum
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
class TicketState(str, Enum):
    """All lifecycle states a ticket can be in."""

    RECEIVED = "RECEIVED"
    PREPARE_ENV = "PREPARE_ENV"
    PLAN = "PLAN"
    IMPLEMENT = "IMPLEMENT"
    VALIDATE = "VALIDATE"
    CREATE_PR = "CREATE_PR"
    WAIT_FOR_REVIEW = "WAIT_FOR_REVIEW"
    RESPOND_TO_REVIEW = "RESPOND_TO_REVIEW"
    MERGED = "MERGED"
    FAILED = "FAILED"

    @property
    def is_terminal(self) -> bool:
        """True for states from which no further transitions are possible."""
        return self is TicketState.MERGED or self is TicketState.FAILED

    @property
    def is_active(self) -> bool:
        """True for states where the orchestrator is actively running work."""
        # Everything except the parked/finished states counts as active.
        inactive = (
            TicketState.WAIT_FOR_REVIEW,
            TicketState.MERGED,
            TicketState.FAILED,
        )
        return self not in inactive
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
# Directed adjacency list — only these transitions are legal.
# Keys are source states; values are every state reachable in one step.
VALID_TRANSITIONS: dict[TicketState, list[TicketState]] = {
    TicketState.RECEIVED: [TicketState.PREPARE_ENV, TicketState.FAILED],
    TicketState.PREPARE_ENV: [TicketState.PLAN, TicketState.FAILED],
    TicketState.PLAN: [TicketState.IMPLEMENT, TicketState.FAILED],
    TicketState.IMPLEMENT: [TicketState.VALIDATE, TicketState.FAILED],
    TicketState.VALIDATE: [
        TicketState.CREATE_PR,
        TicketState.WAIT_FOR_REVIEW,
        # Back-edge to IMPLEMENT — presumably a failed validation retries
        # implementation; confirm against the orchestrator's usage.
        TicketState.IMPLEMENT,
        TicketState.FAILED,
    ],
    TicketState.CREATE_PR: [TicketState.WAIT_FOR_REVIEW, TicketState.FAILED],
    TicketState.WAIT_FOR_REVIEW: [
        TicketState.RESPOND_TO_REVIEW,
        TicketState.MERGED,
        TicketState.FAILED,
    ],
    TicketState.RESPOND_TO_REVIEW: [
        TicketState.VALIDATE,
        TicketState.WAIT_FOR_REVIEW,
        TicketState.FAILED,
    ],
    # Terminal states — no outgoing edges (matches TicketState.is_terminal).
    TicketState.MERGED: [],
    TicketState.FAILED: [],
}
|
|
67
|
+
|
|
68
|
+
|
|
69
|
+
class InvalidTransitionError(Exception):
    """Raised when an illegal state transition is attempted."""

    def __init__(self, current: TicketState, target: TicketState) -> None:
        # Keep both endpoints on the instance so callers can inspect them.
        message = f"Cannot transition from {current.value} to {target.value}"
        super().__init__(message)
        self.current = current
        self.target = target
|
|
76
|
+
|
|
77
|
+
|
|
78
|
+
def transition(current: TicketState, target: TicketState) -> TicketState:
    """
    Validate and perform a state transition.

    Raises:
        InvalidTransitionError: if the target state is not reachable from current.
    """
    allowed = VALID_TRANSITIONS.get(current, [])
    if target in allowed:
        return target
    raise InvalidTransitionError(current, target)
|
|
@@ -0,0 +1,85 @@
|
|
|
1
|
+
"""cli.local_runner — public API re-exports.
|
|
2
|
+
|
|
3
|
+
All callers that do ``from cli import local_runner`` or
|
|
4
|
+
``local_runner.<symbol>`` continue to work unchanged. Symbols are imported
|
|
5
|
+
from their new sub-modules and re-exported here.
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
from cli.local_runner.docker_primitives import (
|
|
9
|
+
CONTAINER_WORKSPACE,
|
|
10
|
+
OPENCODE_AUTH_CONTAINER,
|
|
11
|
+
OPENCODE_AUTH_HOST,
|
|
12
|
+
WORKER_IMAGE,
|
|
13
|
+
WORKSPACE_BASE,
|
|
14
|
+
_check_tool,
|
|
15
|
+
_check_worker_image,
|
|
16
|
+
_run,
|
|
17
|
+
_start_worker_container,
|
|
18
|
+
)
|
|
19
|
+
from cli.local_runner.credentials import (
|
|
20
|
+
_install_git_credentials,
|
|
21
|
+
_make_cred_line,
|
|
22
|
+
_write_git_credentials,
|
|
23
|
+
)
|
|
24
|
+
from cli.local_runner.repo_detection import (
|
|
25
|
+
_INFRA_SERVICE_NAMES,
|
|
26
|
+
_pick_compose_service,
|
|
27
|
+
detect_repo_container,
|
|
28
|
+
RepoContainerConfig,
|
|
29
|
+
)
|
|
30
|
+
from cli.local_runner.validation import (
|
|
31
|
+
_exec_script,
|
|
32
|
+
_file_exists_in_container,
|
|
33
|
+
run_validation,
|
|
34
|
+
ValidationResult,
|
|
35
|
+
)
|
|
36
|
+
from cli.local_runner.workspace import (
|
|
37
|
+
cleanup_workspace,
|
|
38
|
+
commit_if_changed,
|
|
39
|
+
prepare_workspace,
|
|
40
|
+
push_branch,
|
|
41
|
+
read_pr_template,
|
|
42
|
+
recover_container,
|
|
43
|
+
run_git,
|
|
44
|
+
run_opencode,
|
|
45
|
+
workspace_path,
|
|
46
|
+
WorkspaceSetup,
|
|
47
|
+
)
|
|
48
|
+
|
|
49
|
+
# Re-exported public surface of cli.local_runner. NOTE: several
# underscore-prefixed helpers are deliberately listed so existing callers of
# the pre-split module keep working; keep this list in sync with the imports
# above.
__all__ = [
    # docker_primitives
    "CONTAINER_WORKSPACE",
    "OPENCODE_AUTH_CONTAINER",
    "OPENCODE_AUTH_HOST",
    "WORKER_IMAGE",
    "_check_tool",
    "_check_worker_image",
    "_run",
    "_start_worker_container",
    # credentials
    "_install_git_credentials",
    "_make_cred_line",
    "_write_git_credentials",
    # repo_detection
    "_INFRA_SERVICE_NAMES",
    "_pick_compose_service",
    "detect_repo_container",
    "RepoContainerConfig",
    # validation
    "_exec_script",
    "_file_exists_in_container",
    "run_validation",
    "ValidationResult",
    # workspace
    "cleanup_workspace",
    "commit_if_changed",
    "prepare_workspace",
    "push_branch",
    "read_pr_template",
    "recover_container",
    "run_git",
    "run_opencode",
    "workspace_path",
    "WorkspaceSetup",
    "WORKSPACE_BASE",
]
|
|
@@ -0,0 +1,101 @@
|
|
|
1
|
+
"""Git credential management for the devvy worker container.
|
|
2
|
+
|
|
3
|
+
Credentials are written via ``docker cp`` so the PAT is never passed through
|
|
4
|
+
a shell command line (avoiding injection via special characters).
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
from __future__ import annotations
|
|
8
|
+
|
|
9
|
+
import tempfile
|
|
10
|
+
from pathlib import Path
|
|
11
|
+
from urllib.parse import urlparse, urlunparse
|
|
12
|
+
|
|
13
|
+
from cli.local_runner.docker_primitives import _run
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
def _make_cred_line(repo_url: str, ado_pat: str) -> str:
    """
    Build a git credential-store entry for the given repo URL and PAT.

    Format: scheme://user:PAT@host[:port]/path

    The username is preserved from the original URL (e.g. the ADO org name that
    is often embedded as 'OrgName@dev.azure.com'). git credential store matches
    on username too, so the entry username must match the URL username or git
    will fall through to an interactive prompt.

    The PAT must be in the password field — if only a username is present git
    will prompt for the missing password.
    """
    parsed = urlparse(repo_url)
    username = parsed.username or "pat"
    host = parsed.hostname or ""
    # Preserve an explicit port: `parsed.hostname` alone drops it, which would
    # produce a credential entry that never matches the real remote.
    if parsed.port is not None:
        host = f"{host}:{parsed.port}"
    return urlunparse(parsed._replace(netloc=f"{username}:{ado_pat}@{host}"))
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
def _write_git_credentials(ticket_id: str, repo_url: str, ado_pat: str) -> Path:
    """
    Write a temporary git credentials file for the given repo URL and PAT.

    The file is placed at /tmp/devvy-creds-<ticket_id> — outside the workspace
    bind-mount — so the validation container cannot read it.

    Returns the path to the credentials file (caller must delete it in a finally block).
    """
    creds_path = Path(f"/tmp/devvy-creds-{ticket_id}")
    entry = _make_cred_line(repo_url, ado_pat)
    creds_path.write_text(f"{entry}\n", encoding="utf-8")
    # Owner-only: the file contains the PAT in clear text.
    creds_path.chmod(0o600)
    return creds_path
|
|
49
|
+
|
|
50
|
+
|
|
51
|
+
async def _install_git_credentials(
    container_id: str, repo_url: str, ado_pat: str, ticket_id: str
) -> None:
    """
    Write a git credentials file inside *container_id* without using shell
    interpolation of the credential string.

    Strategy: write a host-side tempfile, ``docker cp`` it into the container
    at /root/.git-credentials, then configure git to use the credential store.
    This avoids any risk of shell injection from special characters in the PAT
    or repo URL.
    """
    tmp = tempfile.NamedTemporaryFile(
        mode="w",
        prefix=f"devvy-creds-{ticket_id[:8]}-",
        suffix=".txt",
        delete=False,
        encoding="utf-8",
    )
    with tmp:
        tmp.write(_make_cred_line(repo_url, ado_pat) + "\n")
    tmp_path = Path(tmp.name)

    try:
        tmp_path.chmod(0o600)
        # Copy the file in, lock down its permissions, then point git at the
        # credential store — three docker invocations, no shell involved.
        steps = [
            ["docker", "cp", str(tmp_path), f"{container_id}:/root/.git-credentials"],
            ["docker", "exec", container_id, "chmod", "600", "/root/.git-credentials"],
            [
                "docker",
                "exec",
                container_id,
                "git",
                "config",
                "--global",
                "credential.helper",
                "store",
            ],
        ]
        for cmd in steps:
            await _run(cmd)
    finally:
        # Always remove the host-side copy of the PAT.
        tmp_path.unlink(missing_ok=True)
|
|
@@ -0,0 +1,142 @@
|
|
|
1
|
+
"""Low-level Docker subprocess wrappers.
|
|
2
|
+
|
|
3
|
+
These are the only functions that shell out to the Docker CLI. Everything else
|
|
4
|
+
in the local_runner package calls through here so there is one place to swap
|
|
5
|
+
the Docker backend or adjust error handling.
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
from __future__ import annotations
|
|
9
|
+
|
|
10
|
+
import asyncio
|
|
11
|
+
import shutil
|
|
12
|
+
import sys
|
|
13
|
+
from pathlib import Path
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
# Host directory under which per-ticket workspaces are created.
WORKSPACE_BASE = Path.home() / ".devvy" / "workspaces"
# Tag of the worker image; built on demand by _check_worker_image().
WORKER_IMAGE = "devvy-worker:latest"
# Mount point of the ticket workspace inside the worker container.
CONTAINER_WORKSPACE = "/workspace"
# Host-side opencode auth file, bind-mounted read-only into workers if present.
OPENCODE_AUTH_HOST = Path.home() / ".local" / "share" / "opencode" / "auth.json"
# Path where the auth file appears inside the container.
OPENCODE_AUTH_CONTAINER = "/root/.local/share/opencode/auth.json"

# Dockerfile name used by _find_repo_root() to locate the devvy repo root.
_DOCKERFILE_NAME = "worker.dockerfile"
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
def _check_tool(name: str) -> None:
    """Raise RuntimeError unless *name* is an executable available on PATH."""
    if shutil.which(name) is not None:
        return
    raise RuntimeError(
        f"'{name}' not found on PATH. Please install it before running devvy."
    )
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
def _find_repo_root() -> Path | None:
    """Walk up from this file looking for worker.dockerfile to find the repo root."""
    candidate = Path(__file__).resolve().parent
    for _ in range(10):  # guard against infinite traversal
        if (candidate / _DOCKERFILE_NAME).exists():
            return candidate
        parent = candidate.parent
        if parent == candidate:  # reached the filesystem root
            break
        candidate = parent
    return None
|
|
42
|
+
|
|
43
|
+
|
|
44
|
+
async def _run(
    cmd: list[str],
    cwd: Path | None = None,
    check: bool = True,
) -> str:
    """Run a subprocess asynchronously, return combined stdout+stderr.

    Raises RuntimeError on a non-zero exit when *check* is true; the raised
    message embeds the command line and its captured output.
    """
    proc = await asyncio.create_subprocess_exec(
        *cmd,
        stdout=asyncio.subprocess.PIPE,
        stderr=asyncio.subprocess.STDOUT,  # fold stderr into stdout
        cwd=cwd,
    )
    raw, _ = await proc.communicate()
    output = raw.decode("utf-8", errors="replace") if raw else ""
    failed = check and proc.returncode != 0
    if failed:
        raise RuntimeError(
            f"`{' '.join(cmd)}` failed (exit {proc.returncode}):\n{output}"
        )
    return output
|
|
63
|
+
|
|
64
|
+
|
|
65
|
+
async def _build_worker_image(repo_root: Path) -> None:
    """Build devvy-worker:latest, streaming Docker output to stderr."""
    print(
        "devvy-worker image not found — building it now (this only happens once)...",
        file=sys.stderr,
    )
    build_cmd = [
        "docker",
        "build",
        "-f",
        _DOCKERFILE_NAME,
        "-t",
        WORKER_IMAGE,
        ".",
    ]
    # Stream build output straight to the user's terminal rather than capturing.
    proc = await asyncio.create_subprocess_exec(
        *build_cmd,
        cwd=repo_root,
        stdout=sys.stderr,
        stderr=sys.stderr,
    )
    await proc.wait()
    if proc.returncode != 0:
        raise RuntimeError(
            f"Failed to build '{WORKER_IMAGE}' (exit {proc.returncode}).\n"
            f"You can retry manually from {repo_root}:\n\n"
            f"    docker build -f {_DOCKERFILE_NAME} -t {WORKER_IMAGE} .\n"
        )
    print(f"Successfully built {WORKER_IMAGE}.", file=sys.stderr)
|
|
91
|
+
|
|
92
|
+
|
|
93
|
+
async def _check_worker_image() -> None:
    """Ensure devvy-worker:latest exists locally, auto-building it if not."""
    try:
        await _run(["docker", "image", "inspect", WORKER_IMAGE], check=True)
    except RuntimeError:
        pass  # inspect failed — image is missing, fall through to the build
    else:
        return  # image already present

    repo_root = _find_repo_root()
    if repo_root is None:
        # Can't auto-build without the Dockerfile; tell the user how to do it.
        raise RuntimeError(
            f"Docker image '{WORKER_IMAGE}' not found and '{_DOCKERFILE_NAME}' "
            f"could not be located.\n"
            f"Build the image manually from the devvy repo root:\n\n"
            f"    docker build -f {_DOCKERFILE_NAME} -t {WORKER_IMAGE} .\n"
        )

    await _build_worker_image(repo_root)
|
|
111
|
+
|
|
112
|
+
|
|
113
|
+
async def _start_worker_container(
    ticket_id: str,
    ws: Path,
    repo_url: str,
    ado_pat: str,
) -> str:
    """Spin up a devvy-worker container for *ticket_id* with *ws* bind-mounted.

    Mounts the opencode auth file read-only if it exists on the host, installs
    git credentials inside the container, and returns the container ID.
    """
    # Imported here to avoid a circular import with the credentials module.
    from cli.local_runner.credentials import _install_git_credentials

    container_name = f"devvy-worker-{ticket_id[:8]}"
    cmd = ["docker", "run", "--detach", "--name", container_name]
    cmd += ["-v", f"{ws}:{CONTAINER_WORKSPACE}"]
    if OPENCODE_AUTH_HOST.exists():
        cmd += ["-v", f"{OPENCODE_AUTH_HOST}:{OPENCODE_AUTH_CONTAINER}:ro"]
    cmd.append(WORKER_IMAGE)

    # `docker run --detach` prints the new container's ID on stdout.
    container_id = (await _run(cmd)).strip()
    await _install_git_credentials(container_id, repo_url, ado_pat, ticket_id)
    return container_id
|