openhands-agent-server 1.8.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- openhands/agent_server/__init__.py +0 -0
- openhands/agent_server/__main__.py +118 -0
- openhands/agent_server/api.py +331 -0
- openhands/agent_server/bash_router.py +105 -0
- openhands/agent_server/bash_service.py +379 -0
- openhands/agent_server/config.py +187 -0
- openhands/agent_server/conversation_router.py +321 -0
- openhands/agent_server/conversation_service.py +692 -0
- openhands/agent_server/dependencies.py +72 -0
- openhands/agent_server/desktop_router.py +47 -0
- openhands/agent_server/desktop_service.py +212 -0
- openhands/agent_server/docker/Dockerfile +244 -0
- openhands/agent_server/docker/build.py +825 -0
- openhands/agent_server/docker/wallpaper.svg +22 -0
- openhands/agent_server/env_parser.py +460 -0
- openhands/agent_server/event_router.py +204 -0
- openhands/agent_server/event_service.py +648 -0
- openhands/agent_server/file_router.py +121 -0
- openhands/agent_server/git_router.py +34 -0
- openhands/agent_server/logging_config.py +56 -0
- openhands/agent_server/middleware.py +32 -0
- openhands/agent_server/models.py +307 -0
- openhands/agent_server/openapi.py +21 -0
- openhands/agent_server/pub_sub.py +80 -0
- openhands/agent_server/py.typed +0 -0
- openhands/agent_server/server_details_router.py +43 -0
- openhands/agent_server/sockets.py +173 -0
- openhands/agent_server/tool_preload_service.py +76 -0
- openhands/agent_server/tool_router.py +22 -0
- openhands/agent_server/utils.py +63 -0
- openhands/agent_server/vscode_extensions/openhands-settings/extension.js +22 -0
- openhands/agent_server/vscode_extensions/openhands-settings/package.json +12 -0
- openhands/agent_server/vscode_router.py +70 -0
- openhands/agent_server/vscode_service.py +232 -0
- openhands_agent_server-1.8.2.dist-info/METADATA +15 -0
- openhands_agent_server-1.8.2.dist-info/RECORD +39 -0
- openhands_agent_server-1.8.2.dist-info/WHEEL +5 -0
- openhands_agent_server-1.8.2.dist-info/entry_points.txt +2 -0
- openhands_agent_server-1.8.2.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,825 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
"""
|
|
3
|
+
Single-entry build helper for agent-server images.
|
|
4
|
+
|
|
5
|
+
- Targets: binary | binary-minimal | source | source-minimal
|
|
6
|
+
- Multi-tagging via CUSTOM_TAGS (comma-separated)
|
|
7
|
+
- Versioned tags for custom tags: {SDK_VERSION}-{CUSTOM_TAG}
|
|
8
|
+
- Branch-scoped cache keys
|
|
9
|
+
- CI (push) vs local (load) behavior
|
|
10
|
+
- sdist-based builds: Uses `uv build` to create clean build contexts
|
|
11
|
+
- One entry: build(opts: BuildOptions)
|
|
12
|
+
- Automatically detects sdk_project_root (no manual arg)
|
|
13
|
+
- No local artifacts left behind (uses tempfile dirs only)
|
|
14
|
+
"""
|
|
15
|
+
|
|
16
|
+
import argparse
|
|
17
|
+
import hashlib
|
|
18
|
+
import os
|
|
19
|
+
import re
|
|
20
|
+
import shutil
|
|
21
|
+
import subprocess
|
|
22
|
+
import sys
|
|
23
|
+
import tarfile
|
|
24
|
+
import tempfile
|
|
25
|
+
import threading
|
|
26
|
+
import tomllib
|
|
27
|
+
from contextlib import chdir
|
|
28
|
+
from pathlib import Path
|
|
29
|
+
|
|
30
|
+
from pydantic import BaseModel, Field, field_validator
|
|
31
|
+
|
|
32
|
+
from openhands.sdk.logger import IN_CI, get_logger, rolling_log_view
|
|
33
|
+
from openhands.sdk.workspace import PlatformType, TargetType
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
logger = get_logger(__name__)
|
|
37
|
+
|
|
38
|
+
VALID_TARGETS = {"binary", "binary-minimal", "source", "source-minimal"}
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
# --- helpers ---
|
|
42
|
+
|
|
43
|
+
|
|
44
|
+
def _default_sdk_project_root() -> Path:
    """
    Resolve top-level OpenHands UV workspace root:

    Order:
    1) Walk up from CWD
    2) Walk up from this file location

    Reject anything in site/dist-packages (installed wheels).

    Returns:
        Path to the repo root containing the UV workspace pyproject.toml.

    Raises:
        RuntimeError: if discovery lands inside an installed package, or no
            workspace root can be found from either starting point.
    """
    # Paths containing these segments belong to an installed wheel, not a checkout.
    site_markers = ("site-packages", "dist-packages")

    def _is_workspace_root(d: Path) -> bool:
        """Detect if d is the root of the Agent-SDK repo UV workspace."""
        # Subproject manifests that must exist when UV members are not declared.
        _EXPECTED = (
            "openhands-sdk/pyproject.toml",
            "openhands-tools/pyproject.toml",
            "openhands-workspace/pyproject.toml",
            "openhands-agent-server/pyproject.toml",
        )

        py = d / "pyproject.toml"
        if not py.exists():
            return False
        try:
            cfg = tomllib.loads(py.read_text(encoding="utf-8"))
        except Exception:
            # Unparseable pyproject.toml: fall back to the structural check below.
            cfg = {}
        members = (
            cfg.get("tool", {}).get("uv", {}).get("workspace", {}).get("members", [])
            or []
        )
        # Accept either explicit UV members or structural presence of all subprojects
        if members:
            # Normalize member entries through Path so "./x" and "x" compare equal.
            norm = {str(Path(m)) for m in members}
            return {
                "openhands-sdk",
                "openhands-tools",
                "openhands-workspace",
                "openhands-agent-server",
            }.issubset(norm)
        return all((d / p).exists() for p in _EXPECTED)

    def _climb(start: Path) -> Path | None:
        """Walk up from start (file or dir) to the first workspace root, else None."""
        cur = start.resolve()
        if not cur.is_dir():
            cur = cur.parent
        while True:
            if _is_workspace_root(cur):
                return cur
            if cur.parent == cur:
                # Reached filesystem root without a match.
                return None
            cur = cur.parent

    def validate(p: Path, src: str) -> Path:
        """Reject installed-wheel paths and confirm p resolves to a workspace root."""
        if any(s in str(p) for s in site_markers):
            raise RuntimeError(
                f"{src}: points inside site-packages; need the source checkout."
            )
        root = _climb(p) or p
        if not _is_workspace_root(root):
            raise RuntimeError(
                f"{src}: couldn't find the OpenHands UV workspace root "
                f"starting at '{p}'.\n\n"
                "Expected setup (repo root):\n"
                " pyproject.toml # has [tool.uv.workspace] with members\n"
                " openhands-sdk/pyproject.toml\n"
                " openhands-tools/pyproject.toml\n"
                " openhands-workspace/pyproject.toml\n"
                " openhands-agent-server/pyproject.toml\n\n"
                "Fix:\n"
                " - Run from anywhere inside the repo."
            )
        return root

    # 1) Prefer discovery from the current working directory.
    if root := _climb(Path.cwd()):
        return validate(root, "CWD discovery")

    # 2) Fall back to this file's location (NameError if __file__ is absent,
    # e.g. when executed via exec()).
    try:
        here = Path(__file__).resolve()
        if root := _climb(here):
            return validate(root, "__file__ discovery")
    except NameError:
        pass

    # Final, user-facing guidance
    raise RuntimeError(
        "Could not resolve the OpenHands UV workspace root.\n\n"
        "Expected repo layout:\n"
        " pyproject.toml (with [tool.uv.workspace].members "
        "including openhands/* subprojects)\n"
        " openhands-sdk/pyproject.toml\n"
        " openhands-tools/pyproject.toml\n"
        " openhands-workspace/pyproject.toml\n"
        " openhands-agent-server/pyproject.toml\n\n"
        "Run this from inside the repo."
    )
|
|
141
|
+
|
|
142
|
+
|
|
143
|
+
def _run(
    cmd: list[str],
    cwd: str | None = None,
) -> subprocess.CompletedProcess:
    """
    Run *cmd*, streaming stdout and stderr concurrently into the rolling
    logger while also capturing the FULL text of both streams.

    Returns:
        CompletedProcess with stdout/stderr set to the complete captured text.

    Raises:
        subprocess.CalledProcessError: on non-zero exit, carrying both the
            full stdout (as ``output``) and stderr.
    """
    logger.info(f"$ {' '.join(cmd)} (cwd={cwd})")

    proc = subprocess.Popen(
        cmd,
        cwd=cwd,
        text=True,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,  # keep the two streams separate
        bufsize=1,  # line-buffered
    )
    assert proc.stdout is not None and proc.stderr is not None

    captured_out: list[str] = []
    captured_err: list[str] = []

    def _drain(stream, sink: list[str], emit, tag: str) -> None:
        # Forward each line to the logger while keeping a full copy.
        for raw in stream:
            stripped = raw.rstrip("\n")
            sink.append(stripped)
            emit(f"{tag}{stripped}")

    header = "$ " + " ".join(cmd) + (f" (cwd={cwd})" if cwd else "")
    with rolling_log_view(logger, header=header):
        # One pump thread per stream so neither pipe can fill and deadlock.
        workers = [
            threading.Thread(
                target=_drain,
                args=(proc.stdout, captured_out, logger.info, "[stdout] "),
            ),
            threading.Thread(
                target=_drain,
                args=(proc.stderr, captured_err, logger.warning, "[stderr] "),
            ),
        ]
        for w in workers:
            w.start()
        for w in workers:
            w.join()

    exit_code = proc.wait()
    full_out = ("\n".join(captured_out) + "\n") if captured_out else ""
    full_err = ("\n".join(captured_err) + "\n") if captured_err else ""

    if exit_code != 0:
        # Surface the complete outputs to the caller on failure.
        raise subprocess.CalledProcessError(
            exit_code, cmd, output=full_out, stderr=full_err
        )

    return subprocess.CompletedProcess(
        cmd, exit_code, stdout=full_out, stderr=full_err
    )
|
|
200
|
+
|
|
201
|
+
|
|
202
|
+
def _sanitize_branch(ref: str) -> str:
|
|
203
|
+
ref = re.sub(r"^refs/heads/", "", ref or "unknown")
|
|
204
|
+
return re.sub(r"[^a-zA-Z0-9.-]+", "-", ref).lower()
|
|
205
|
+
|
|
206
|
+
|
|
207
|
+
def _base_slug(image: str, max_len: int = 64) -> str:
|
|
208
|
+
"""
|
|
209
|
+
If the slug is too long, keep the most identifiable parts:
|
|
210
|
+
- repository name (last path segment)
|
|
211
|
+
- tag (if present)
|
|
212
|
+
Then append a short digest for uniqueness.
|
|
213
|
+
Format preserved with existing separators: '_s_' for '/', '_tag_' for ':'.
|
|
214
|
+
|
|
215
|
+
Example:
|
|
216
|
+
'ghcr.io_s_org_s/very-long-repo_tag_v1.2.3-extra'
|
|
217
|
+
-> 'very-long-repo_tag_v1.2.3-<digest>'
|
|
218
|
+
"""
|
|
219
|
+
base_slug = image.replace("/", "_s_").replace(":", "_tag_")
|
|
220
|
+
|
|
221
|
+
if len(base_slug) <= max_len:
|
|
222
|
+
return base_slug
|
|
223
|
+
|
|
224
|
+
digest = hashlib.sha256(base_slug.encode()).hexdigest()[:12]
|
|
225
|
+
suffix = f"-{digest}"
|
|
226
|
+
|
|
227
|
+
# Parse components from the slug form
|
|
228
|
+
if "_tag_" in base_slug:
|
|
229
|
+
left, tag = base_slug.split("_tag_", 1)
|
|
230
|
+
else:
|
|
231
|
+
left, tag = base_slug, ""
|
|
232
|
+
|
|
233
|
+
parts = left.split("_s_") if left else []
|
|
234
|
+
repo = parts[-1] if parts else left # last path segment is the repo
|
|
235
|
+
|
|
236
|
+
# Reconstruct a compact, identifiable core: "<repo>[_tag_<tag>]"
|
|
237
|
+
ident = repo + (f"_tag_{tag}" if tag else "")
|
|
238
|
+
|
|
239
|
+
# Fit within budget, reserving space for the digest suffix
|
|
240
|
+
visible_budget = max_len - len(suffix)
|
|
241
|
+
assert visible_budget > 0, (
|
|
242
|
+
f"max_len too small to fit digest suffix with length {len(suffix)}"
|
|
243
|
+
)
|
|
244
|
+
|
|
245
|
+
kept = ident[:visible_budget]
|
|
246
|
+
return kept + suffix
|
|
247
|
+
|
|
248
|
+
|
|
249
|
+
def _git_info() -> tuple[str, str]:
    """
    Resolve (git_ref, git_sha) for the SDK workspace checkout.

    Priority order for SHA:
    1. SDK_SHA - Explicit override (e.g., for submodule builds)
    2. GITHUB_SHA - GitHub Actions environment
    3. `git rev-parse --verify HEAD` - Local development

    Priority order for REF:
    1. SDK_REF - Explicit override (e.g., for submodule builds)
    2. GITHUB_REF - GitHub Actions environment
    3. `git symbolic-ref -q --short HEAD` - Local development

    Either value falls back to "unknown" when git fails (e.g., not a repo,
    detached HEAD for the ref).
    """
    repo_root = _default_sdk_project_root()

    def _from_git(git_cmd: list[str]) -> str:
        # Run a git query inside the workspace; "unknown" on any git failure.
        try:
            return _run(git_cmd, cwd=str(repo_root)).stdout.strip()
        except subprocess.CalledProcessError:
            return "unknown"

    sha = os.environ.get("SDK_SHA") or os.environ.get("GITHUB_SHA")
    if not sha:
        sha = _from_git(["git", "rev-parse", "--verify", "HEAD"])

    ref = os.environ.get("SDK_REF") or os.environ.get("GITHUB_REF")
    if not ref:
        ref = _from_git(["git", "symbolic-ref", "-q", "--short", "HEAD"])

    return ref, sha
|
|
284
|
+
|
|
285
|
+
|
|
286
|
+
def _package_version() -> str:
    """
    Return the openhands-sdk semantic version, used for versioned release tags.

    Tries installed package metadata first; if the package is not installed,
    falls back to reading openhands-sdk/pyproject.toml from the workspace.
    Returns "unknown" when neither source is available.
    """
    try:
        from importlib.metadata import version

        return version("openhands-sdk")
    except Exception:
        pass

    # Package not installed: read the version straight from the workspace.
    try:
        pyproject = _default_sdk_project_root() / "openhands-sdk" / "pyproject.toml"
        if pyproject.exists():
            data = tomllib.loads(pyproject.read_text(encoding="utf-8"))
            return data.get("project", {}).get("version", "unknown")
    except Exception:
        pass

    return "unknown"
|
|
306
|
+
|
|
307
|
+
|
|
308
|
+
# Resolved once at import time; used as defaults for BuildOptions fields below.
_DEFAULT_GIT_REF, _DEFAULT_GIT_SHA = _git_info()
_DEFAULT_PACKAGE_VERSION = _package_version()
|
|
310
|
+
|
|
311
|
+
|
|
312
|
+
class BuildOptions(BaseModel):
    """Validated configuration for one agent-server image build.

    Field defaults come from the environment resolved at import time
    (_DEFAULT_GIT_SHA / _DEFAULT_GIT_REF / _DEFAULT_PACKAGE_VERSION) and from
    workspace auto-detection. Tag composition is exposed via the properties
    below; `all_tags` is the full list passed to `docker buildx build --tag`.
    """

    # Base image the Dockerfile builds FROM.
    base_image: str = Field(default="nikolaik/python-nodejs:python3.12-nodejs22")
    custom_tags: str = Field(
        default="", description="Comma-separated list of custom tags."
    )
    # Target image repository (no tag component).
    image: str = Field(default="ghcr.io/openhands/agent-server")
    # Dockerfile build stage; validated against VALID_TARGETS below.
    target: TargetType = Field(default="binary")
    platforms: list[PlatformType] = Field(default=["linux/amd64"])
    push: bool | None = Field(
        default=None, description="None=auto (CI push, local load)"
    )
    arch: str | None = Field(
        default=None,
        description="Architecture suffix (e.g., 'amd64', 'arm64') to append to tags",
    )
    include_base_tag: bool = Field(
        default=True,
        description=(
            "Whether to include the automatically generated base tag "
            "based on git SHA and base image name in all_tags output."
        ),
    )
    include_versioned_tag: bool = Field(
        default=False,
        description=(
            "Whether to include the versioned tag (e.g., v1.0.0_...) in all_tags "
            "output. Should only be True for release builds."
        ),
    )
    git_sha: str = Field(
        default=_DEFAULT_GIT_SHA,
        description="Git commit SHA.We will need it to tag the built image.",
    )
    git_ref: str = Field(default=_DEFAULT_GIT_REF)
    sdk_project_root: Path = Field(
        default_factory=_default_sdk_project_root,
        description="Path to OpenHands SDK root. Auto if None.",
    )
    sdk_version: str = Field(
        default=_DEFAULT_PACKAGE_VERSION,
        description=(
            "SDK package version. "
            "We will need it to tag the built image. "
            "Note this is only used if include_versioned_tag is True "
            "(e.g., at each release)."
        ),
    )

    @property
    def short_sha(self) -> str:
        """First 7 chars of git_sha, or 'unknown' when the SHA is unknown."""
        return self.git_sha[:7] if self.git_sha != "unknown" else "unknown"

    @field_validator("target")
    @classmethod
    def _valid_target(cls, v: str) -> str:
        """Reject any build target outside VALID_TARGETS."""
        if v not in VALID_TARGETS:
            raise ValueError(f"target must be one of {sorted(VALID_TARGETS)}")
        return v

    @property
    def custom_tag_list(self) -> list[str]:
        """custom_tags split on commas, trimmed, with empty entries dropped."""
        return [t.strip() for t in self.custom_tags.split(",") if t.strip()]

    @property
    def base_image_slug(self) -> str:
        """Tag-safe, length-bounded slug of base_image (see _base_slug)."""
        return _base_slug(self.base_image)

    @property
    def versioned_tags(self) -> list[str]:
        """
        Generate simple version tags for each custom tag variant.
        Returns tags like: 1.2.0-python, 1.2.0-java, 1.2.0-golang
        """
        return [f"{self.sdk_version}-{t}" for t in self.custom_tag_list]

    @property
    def base_tag(self) -> str:
        """Commit-scoped tag combining the short SHA and base-image slug."""
        return f"{self.short_sha}-{self.base_image_slug}"

    @property
    def cache_tags(self) -> tuple[str, str]:
        """(branch-scoped cache tag, base cache tag) for buildx cache refs."""
        base = f"buildcache-{self.target}-{self.base_image_slug}"
        if self.git_ref in ("main", "refs/heads/main"):
            return f"{base}-main", base
        elif self.git_ref != "unknown":
            # Feature branches get their own cache key derived from the ref.
            return f"{base}-{_sanitize_branch(self.git_ref)}", base
        else:
            return base, base

    @property
    def all_tags(self) -> list[str]:
        """Full list of image:tag references this build should apply."""
        tags: list[str] = []
        arch_suffix = f"-{self.arch}" if self.arch else ""

        # Use git commit SHA for commit-based tags
        for t in self.custom_tag_list:
            tags.append(f"{self.image}:{self.short_sha}-{t}{arch_suffix}")

        # Builds on main additionally get rolling 'main-<tag>' aliases.
        if self.git_ref in ("main", "refs/heads/main"):
            for t in self.custom_tag_list:
                tags.append(f"{self.image}:main-{t}{arch_suffix}")

        if self.include_base_tag:
            tags.append(f"{self.image}:{self.base_tag}{arch_suffix}")
        if self.include_versioned_tag:
            for versioned_tag in self.versioned_tags:
                tags.append(f"{self.image}:{versioned_tag}{arch_suffix}")

        # Append target suffix for clarity (binary is default, no suffix needed)
        if self.target != "binary":
            tags = [f"{t}-{self.target}" for t in tags]
        return tags
|
|
424
|
+
|
|
425
|
+
|
|
426
|
+
# --- build helpers ---
|
|
427
|
+
|
|
428
|
+
|
|
429
|
+
def _extract_tarball(tarball: Path, dest: Path) -> None:
|
|
430
|
+
dest = dest.resolve()
|
|
431
|
+
dest.mkdir(parents=True, exist_ok=True)
|
|
432
|
+
with tarfile.open(tarball, "r:gz") as tar, chdir(dest):
|
|
433
|
+
# Pre-validate entries
|
|
434
|
+
for m in tar.getmembers():
|
|
435
|
+
name = m.name.lstrip("./")
|
|
436
|
+
p = Path(name)
|
|
437
|
+
if p.is_absolute() or ".." in p.parts:
|
|
438
|
+
raise RuntimeError(f"Unsafe path in sdist: {m.name}")
|
|
439
|
+
# Safe(-r) extraction: no symlinks/devices
|
|
440
|
+
tar.extractall(path=".", filter="data")
|
|
441
|
+
|
|
442
|
+
|
|
443
|
+
def _make_build_context(sdk_project_root: Path) -> Path:
    """Create a clean, VCS-free docker build context from an sdist.

    Builds a single sdist of the workspace with `uv build`, extracts it into
    a fresh temp dir, and drops the agent-server Dockerfile at its root.

    Returns:
        Path to the extracted sdist folder (the build context). The caller
        owns this directory and is responsible for deleting it.

    Raises:
        FileNotFoundError: if the Dockerfile is missing (via _get_dockerfile_path).
        AssertionError: if `uv build` does not yield exactly one sdist, or the
            sdist does not contain exactly one top-level folder.
    """
    dockerfile_path = _get_dockerfile_path(sdk_project_root)
    tmp_root = Path(tempfile.mkdtemp(prefix="agent-build-", dir=None)).resolve()
    sdist_dir = Path(tempfile.mkdtemp(prefix="agent-sdist-", dir=None)).resolve()
    try:
        # Let uv produce the sdist so the context contains only packaged files.
        _run(
            ["uv", "build", "--sdist", "--out-dir", str(sdist_dir.resolve())],
            cwd=str(sdk_project_root.resolve()),
        )
        sdists = sorted(sdist_dir.glob("*.tar.gz"), key=lambda p: p.stat().st_mtime)
        logger.info(
            f"[build] Built {len(sdists)} sdists for "
            f"clean context: {', '.join(str(s) for s in sdists)}"
        )
        assert len(sdists) == 1, "Expected exactly one sdist"
        logger.debug(
            f"[build] Extracting sdist {sdists[0]} to clean context {tmp_root}"
        )
        _extract_tarball(sdists[0], tmp_root)

        # assert only one folder created
        entries = list(tmp_root.iterdir())
        assert len(entries) == 1 and entries[0].is_dir(), (
            "Expected single folder in sdist"
        )
        # Re-point tmp_root at the extracted package folder itself.
        tmp_root = entries[0].resolve()
        # copy Dockerfile into place
        shutil.copy2(dockerfile_path, tmp_root / "Dockerfile")
        logger.debug(f"[build] Clean context ready at {tmp_root}")
        return tmp_root
    except Exception:
        # On any failure, remove the (possibly partial) context before re-raising.
        shutil.rmtree(tmp_root, ignore_errors=True)
        raise
    finally:
        # The intermediate sdist directory is never needed after extraction.
        shutil.rmtree(sdist_dir, ignore_errors=True)
|
|
479
|
+
|
|
480
|
+
|
|
481
|
+
def _active_buildx_driver() -> str | None:
    """Return the driver name of the active buildx builder, or None.

    Parses `docker buildx inspect --bootstrap` output for the first
    'Driver:' line; any failure (docker missing, command error) yields None.
    """
    try:
        inspect_out = _run(["docker", "buildx", "inspect", "--bootstrap"]).stdout
    except Exception:
        return None
    for raw in inspect_out.splitlines():
        stripped = raw.strip()
        if stripped.startswith("Driver:"):
            _, _, value = stripped.partition(":")
            return value.strip()
    return None
|
|
491
|
+
|
|
492
|
+
|
|
493
|
+
def _default_local_cache_dir() -> Path:
|
|
494
|
+
# keep cache outside repo; override with BUILD_CACHE_DIR if wanted
|
|
495
|
+
root = os.environ.get("BUILD_CACHE_DIR")
|
|
496
|
+
if root:
|
|
497
|
+
return Path(root).expanduser().resolve()
|
|
498
|
+
xdg = os.environ.get("XDG_CACHE_HOME", str(Path.home() / ".cache"))
|
|
499
|
+
return Path(xdg) / "openhands" / "buildx-cache"
|
|
500
|
+
|
|
501
|
+
|
|
502
|
+
def _get_dockerfile_path(sdk_project_root: Path) -> Path:
|
|
503
|
+
dockerfile_path = (
|
|
504
|
+
sdk_project_root
|
|
505
|
+
/ "openhands-agent-server"
|
|
506
|
+
/ "openhands"
|
|
507
|
+
/ "agent_server"
|
|
508
|
+
/ "docker"
|
|
509
|
+
/ "Dockerfile"
|
|
510
|
+
)
|
|
511
|
+
if not dockerfile_path.exists():
|
|
512
|
+
raise FileNotFoundError(f"Dockerfile not found at {dockerfile_path}")
|
|
513
|
+
return dockerfile_path
|
|
514
|
+
|
|
515
|
+
|
|
516
|
+
# --- single entry point ---
|
|
517
|
+
|
|
518
|
+
|
|
519
|
+
def build(opts: BuildOptions) -> list[str]:
    """Single entry point for building the agent-server image.

    Creates a clean sdist-based build context, assembles the
    `docker buildx build` command (tags, platforms, cache strategy),
    runs it, and always cleans the context afterwards.

    Returns:
        The list of image:tag references applied to the build.

    Raises:
        subprocess.CalledProcessError: if the docker build fails (full
            stdout/stderr are logged before re-raising).
    """
    dockerfile_path = _get_dockerfile_path(opts.sdk_project_root)
    push = opts.push
    if push is None:
        # Auto mode: push from CI, load into the local daemon otherwise.
        push = IN_CI

    tags = opts.all_tags
    cache_tag, cache_tag_base = opts.cache_tags

    ctx = _make_build_context(opts.sdk_project_root)
    logger.info(f"[build] Clean build context: {ctx}")

    args = [
        "docker",
        "buildx",
        "build",
        "--file",
        str(dockerfile_path),
        "--target",
        opts.target,
        "--build-arg",
        f"BASE_IMAGE={opts.base_image}",
    ]
    if push:
        # Multi-platform builds can only be exported via --push.
        args += ["--platform", ",".join(opts.platforms), "--push"]
    else:
        args += ["--load"]

    for t in tags:
        args += ["--tag", t]

    # -------- cache strategy --------
    driver = _active_buildx_driver() or "unknown"
    local_cache_dir = _default_local_cache_dir()
    cache_args: list[str] = []

    if push:
        # Remote/CI builds: use registry cache + inline for maximum reuse.
        cache_args += [
            "--cache-from",
            f"type=registry,ref={opts.image}:{cache_tag}",
            "--cache-from",
            f"type=registry,ref={opts.image}:{cache_tag_base}-main",
            "--cache-to",
            f"type=registry,ref={opts.image}:{cache_tag},mode=max",
        ]
        logger.info("[build] Cache: registry (remote/CI) + inline")
    else:
        # Local/dev builds: prefer local dir cache if
        # driver supports it; otherwise inline-only.
        if driver == "docker-container":
            local_cache_dir.mkdir(parents=True, exist_ok=True)
            cache_args += [
                "--cache-from",
                f"type=local,src={str(local_cache_dir)}",
                "--cache-to",
                f"type=local,dest={str(local_cache_dir)},mode=max",
            ]
            logger.info(
                f"[build] Cache: local dir at {local_cache_dir} (driver={driver})"
            )
        else:
            logger.warning(
                f"[build] WARNING: Active buildx driver is '{driver}', "
                "which does not support local dir caching. Fallback to INLINE CACHE\n"
                " Consider running the following commands to set up a "
                "compatible buildx environment:\n"
                " 1. docker buildx create --name openhands-builder "
                "--driver docker-container --use\n"
                " 2. docker buildx inspect --bootstrap\n"
            )
            # docker driver can't export caches; fall back to inline metadata only.
            cache_args += ["--build-arg", "BUILDKIT_INLINE_CACHE=1"]
            logger.info(f"[build] Cache: inline only (driver={driver})")

    args += cache_args + [str(ctx)]

    logger.info(
        f"[build] Building target='{opts.target}' image='{opts.image}' "
        f"custom_tags='{opts.custom_tags}' from base='{opts.base_image}' "
        f"for platforms='{opts.platforms if push else 'local-arch'}'"
    )
    logger.info(
        f"[build] Git ref='{opts.git_ref}' sha='{opts.git_sha}' "
        f"package_version='{opts.sdk_version}'"
    )
    logger.info(f"[build] Cache tag: {cache_tag}")

    try:
        res = _run(args, cwd=str(ctx))
        # Mirror buildx output on stdout for callers that capture it.
        sys.stdout.write(res.stdout or "")
    except subprocess.CalledProcessError as e:
        logger.error(f"[build] ERROR: Build failed with exit code {e.returncode}")
        logger.error(f"[build] Command: {' '.join(e.cmd)}")
        logger.error(f"[build] Full stdout:\n{e.output}")
        logger.error(f"[build] Full stderr:\n{e.stderr}")
        raise
    finally:
        # Always remove the temp build context, success or failure.
        logger.info(f"[build] Cleaning {ctx}")
        shutil.rmtree(ctx, ignore_errors=True)

    logger.info("[build] Done. Tags:")
    for t in tags:
        logger.info(f" - {t}")
    return tags
|
|
625
|
+
|
|
626
|
+
|
|
627
|
+
# --- CLI shim ---
|
|
628
|
+
|
|
629
|
+
|
|
630
|
+
def _env(name: str, default: str) -> str:
|
|
631
|
+
v = os.environ.get(name)
|
|
632
|
+
return v if v else default
|
|
633
|
+
|
|
634
|
+
|
|
635
|
+
def main(argv: list[str]) -> int:
    """CLI shim around build().

    Parses flags (with env-var defaults via _env), resolves the workspace
    root, and either (a) with --build-ctx-only, creates just the clean
    context and emits its path plus tag metadata, or (b) runs the full
    build and exposes outputs for GitHub Actions.

    Returns:
        Process exit code (0 on success, 1 when the workspace root cannot
        be resolved).
    """
    # ---- argparse ----
    parser = argparse.ArgumentParser(
        description="Single-entry build helper for agent-server images."
    )
    parser.add_argument(
        "--base-image",
        default=_env("BASE_IMAGE", "nikolaik/python-nodejs:python3.12-nodejs22"),
        help="Base image to use (default from $BASE_IMAGE).",
    )
    parser.add_argument(
        "--custom-tags",
        default=_env("CUSTOM_TAGS", ""),
        help="Comma-separated custom tags (default from $CUSTOM_TAGS).",
    )
    parser.add_argument(
        "--image",
        default=_env("IMAGE", "ghcr.io/openhands/agent-server"),
        help="Image repo/name (default from $IMAGE).",
    )
    parser.add_argument(
        "--target",
        default=_env("TARGET", "binary"),
        choices=sorted(VALID_TARGETS),
        help="Build target (default from $TARGET).",
    )
    parser.add_argument(
        "--platforms",
        default=_env("PLATFORMS", "linux/amd64,linux/arm64"),
        help="Comma-separated platforms (default from $PLATFORMS).",
    )
    parser.add_argument(
        "--arch",
        default=_env("ARCH", ""),
        help=(
            "Architecture suffix for tags (e.g., 'amd64', 'arm64', default from $ARCH)."
        ),
    )
    # --push and --load are mutually exclusive overrides of the env/auto mode.
    group = parser.add_mutually_exclusive_group()
    group.add_argument(
        "--push",
        action="store_true",
        help="Force push via buildx (overrides env).",
    )
    group.add_argument(
        "--load",
        action="store_true",
        help="Force local load (overrides env).",
    )
    parser.add_argument(
        "--sdk-project-root",
        type=Path,
        default=None,
        help="Path to OpenHands SDK root (default: auto-detect).",
    )
    parser.add_argument(
        "--build-ctx-only",
        action="store_true",
        help="Only create the clean build context directory and print its path.",
    )
    parser.add_argument(
        "--versioned-tag",
        action="store_true",
        help=(
            "Include versioned tag (e.g., v1.0.0_...) in output. "
            "Should only be used for release builds."
        ),
    )

    args = parser.parse_args(argv)

    # ---- resolve sdk project root ----
    sdk_project_root = args.sdk_project_root
    if sdk_project_root is None:
        try:
            sdk_project_root = _default_sdk_project_root()
        except Exception as e:
            logger.error(str(e))
            return 1

    # ---- build-ctx-only path ----
    if args.build_ctx_only:
        # NOTE: unlike build(), this context is intentionally NOT cleaned up,
        # so callers (e.g. CI steps) can build from it afterwards.
        ctx = _make_build_context(sdk_project_root)
        logger.info(f"[build] Clean build context (kept for debugging): {ctx}")

        # Create BuildOptions to generate tags
        opts = BuildOptions(
            base_image=args.base_image,
            custom_tags=args.custom_tags,
            image=args.image,
            target=args.target,  # type: ignore
            platforms=[p.strip() for p in args.platforms.split(",") if p.strip()],  # type: ignore
            push=None,  # Not relevant for build-ctx-only
            sdk_project_root=sdk_project_root,
            arch=args.arch or None,
            include_versioned_tag=args.versioned_tag,
        )

        # If running in GitHub Actions, write outputs directly to GITHUB_OUTPUT
        github_output = os.environ.get("GITHUB_OUTPUT")
        if github_output:
            with open(github_output, "a") as fh:
                fh.write(f"build_context={ctx}\n")
                fh.write(f"dockerfile={ctx / 'Dockerfile'}\n")
                fh.write(f"tags_csv={','.join(opts.all_tags)}\n")
                # Only output versioned tags if they're being used
                if opts.include_versioned_tag:
                    fh.write(f"versioned_tags_csv={','.join(opts.versioned_tags)}\n")
                else:
                    fh.write("versioned_tags_csv=\n")
                fh.write(f"base_image_slug={opts.base_image_slug}\n")
            logger.info("[build] Wrote outputs to $GITHUB_OUTPUT")

        # Also print to stdout for debugging/local use
        print(str(ctx))
        return 0

    # ---- push/load resolution (CLI wins over env, else auto) ----
    push: bool | None
    if args.push:
        push = True
    elif args.load:
        push = False
    else:
        # Env fallback: PUSH=1 forces push, LOAD=1 forces load, else None
        # leaves the decision to build() (push in CI, load locally).
        push = (
            True
            if os.environ.get("PUSH") == "1"
            else False
            if os.environ.get("LOAD") == "1"
            else None
        )

    # ---- normal build path ----
    opts = BuildOptions(
        base_image=args.base_image,
        custom_tags=args.custom_tags,
        image=args.image,
        target=args.target,  # type: ignore
        platforms=[p.strip() for p in args.platforms.split(",") if p.strip()],  # type: ignore
        push=push,
        sdk_project_root=sdk_project_root,
        arch=args.arch or None,
        include_versioned_tag=args.versioned_tag,
    )
    tags = build(opts)

    # --- expose outputs for GitHub Actions ---
    def _write_gha_outputs(
        image: str,
        short_sha: str,
        versioned_tags: list[str],
        tags_list: list[str],
        include_versioned_tag: bool,
    ) -> None:
        """
        If running in GitHub Actions, append step outputs to $GITHUB_OUTPUT.
        - image: repo/name (no tag)
        - short_sha: 7-char SHA
        - versioned_tags_csv: comma-separated list of versioned tags
        (empty if not enabled)
        - tags: multiline output (one per line)
        - tags_csv: single-line, comma-separated
        """
        out_path = os.environ.get("GITHUB_OUTPUT")
        if not out_path:
            # Not running under GitHub Actions: nothing to do.
            return
        with open(out_path, "a", encoding="utf-8") as fh:
            fh.write(f"image={image}\n")
            fh.write(f"short_sha={short_sha}\n")
            # Only output versioned tags if they're being used
            if include_versioned_tag:
                fh.write(f"versioned_tags_csv={','.join(versioned_tags)}\n")
            else:
                fh.write("versioned_tags_csv=\n")
            fh.write(f"tags_csv={','.join(tags_list)}\n")
            # Multiline GHA output uses the heredoc-style delimiter syntax.
            fh.write("tags<<EOF\n")
            fh.write("\n".join(tags_list) + "\n")
            fh.write("EOF\n")

    _write_gha_outputs(
        opts.image,
        opts.short_sha,
        opts.versioned_tags,
        tags,
        opts.include_versioned_tag,
    )
    return 0
|
|
822
|
+
|
|
823
|
+
|
|
824
|
+
# Script entry point: forward argv (minus program name) and propagate exit code.
if __name__ == "__main__":
    sys.exit(main(sys.argv[1:]))
|