ninetrix 0.1.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (43) hide show
  1. ninetrix-0.1.0/PKG-INFO +126 -0
  2. ninetrix-0.1.0/README.md +108 -0
  3. ninetrix-0.1.0/agentfile/__init__.py +1 -0
  4. ninetrix-0.1.0/agentfile/cli.py +136 -0
  5. ninetrix-0.1.0/agentfile/commands/__init__.py +0 -0
  6. ninetrix-0.1.0/agentfile/commands/auth.py +120 -0
  7. ninetrix-0.1.0/agentfile/commands/build.py +172 -0
  8. ninetrix-0.1.0/agentfile/commands/compose.py +557 -0
  9. ninetrix-0.1.0/agentfile/commands/deploy.py +90 -0
  10. ninetrix-0.1.0/agentfile/commands/dev.py +265 -0
  11. ninetrix-0.1.0/agentfile/commands/doctor.py +273 -0
  12. ninetrix-0.1.0/agentfile/commands/down.py +144 -0
  13. ninetrix-0.1.0/agentfile/commands/gateway.py +448 -0
  14. ninetrix-0.1.0/agentfile/commands/init.py +82 -0
  15. ninetrix-0.1.0/agentfile/commands/invoke.py +170 -0
  16. ninetrix-0.1.0/agentfile/commands/logs.py +199 -0
  17. ninetrix-0.1.0/agentfile/commands/mcp.py +302 -0
  18. ninetrix-0.1.0/agentfile/commands/restart.py +144 -0
  19. ninetrix-0.1.0/agentfile/commands/rollback.py +148 -0
  20. ninetrix-0.1.0/agentfile/commands/run.py +325 -0
  21. ninetrix-0.1.0/agentfile/commands/status.py +233 -0
  22. ninetrix-0.1.0/agentfile/commands/trace.py +316 -0
  23. ninetrix-0.1.0/agentfile/commands/up.py +318 -0
  24. ninetrix-0.1.0/agentfile/commands/validate.py +263 -0
  25. ninetrix-0.1.0/agentfile/core/__init__.py +0 -0
  26. ninetrix-0.1.0/agentfile/core/auth.py +64 -0
  27. ninetrix-0.1.0/agentfile/core/docker.py +130 -0
  28. ninetrix-0.1.0/agentfile/core/errors.py +99 -0
  29. ninetrix-0.1.0/agentfile/core/mcp_registry.py +153 -0
  30. ninetrix-0.1.0/agentfile/core/models.py +634 -0
  31. ninetrix-0.1.0/agentfile/core/schema.json +461 -0
  32. ninetrix-0.1.0/agentfile/core/template_context.py +178 -0
  33. ninetrix-0.1.0/agentfile/templates/Dockerfile.j2 +96 -0
  34. ninetrix-0.1.0/agentfile/templates/agentfile.yaml.j2 +83 -0
  35. ninetrix-0.1.0/agentfile/templates/entrypoint.py.j2 +2234 -0
  36. ninetrix-0.1.0/ninetrix.egg-info/PKG-INFO +126 -0
  37. ninetrix-0.1.0/ninetrix.egg-info/SOURCES.txt +41 -0
  38. ninetrix-0.1.0/ninetrix.egg-info/dependency_links.txt +1 -0
  39. ninetrix-0.1.0/ninetrix.egg-info/entry_points.txt +3 -0
  40. ninetrix-0.1.0/ninetrix.egg-info/requires.txt +9 -0
  41. ninetrix-0.1.0/ninetrix.egg-info/top_level.txt +1 -0
  42. ninetrix-0.1.0/pyproject.toml +35 -0
  43. ninetrix-0.1.0/setup.cfg +4 -0
@@ -0,0 +1,126 @@
1
+ Metadata-Version: 2.4
2
+ Name: ninetrix
3
+ Version: 0.1.0
4
+ Summary: CLI for building and deploying AI agents as containers
5
+ License: MIT
6
+ Keywords: ai,agents,cli,docker,automation
7
+ Requires-Python: >=3.10
8
+ Description-Content-Type: text/markdown
9
+ Requires-Dist: click>=8.1
10
+ Requires-Dist: rich>=13.7
11
+ Requires-Dist: pyyaml>=6.0
12
+ Requires-Dist: jinja2>=3.1
13
+ Requires-Dist: docker>=7.0
14
+ Requires-Dist: mcp>=1.0
15
+ Requires-Dist: jsonschema>=4.0
16
+ Requires-Dist: psycopg[binary]>=3.0
17
+ Requires-Dist: httpx>=0.27
18
+
19
+ # ninetrix
20
+
21
+ Build and deploy AI agents as Docker containers. Define your agent in YAML, ship it anywhere Docker runs.
22
+
23
+ ```bash
24
+ pip install ninetrix
25
+ ```
26
+
27
+ ---
28
+
29
+ ## Quickstart
30
+
31
+ ```bash
32
+ # Scaffold a new agent
33
+ ninetrix init --name my-agent --provider anthropic
34
+
35
+ # Build the container image
36
+ ninetrix build --file ninetrix.yaml
37
+
38
+ # Run it interactively
39
+ ninetrix run --file ninetrix.yaml
40
+ ```
41
+
42
+ ## Multi-agent crews
43
+
44
+ ```bash
45
+ # Start all agents on a shared Docker network
46
+ ninetrix up --file ninetrix.yaml
47
+
48
+ # Trigger the orchestrator
49
+ ninetrix invoke --agent orchestrator -m "Research Python history and write a summary"
50
+
51
+ # Stream logs from all agents
52
+ ninetrix logs --file ninetrix.yaml
53
+
54
+ # Visualize the execution trace
55
+ ninetrix trace --thread-id <id>
56
+
57
+ # Tear down
58
+ ninetrix down --file ninetrix.yaml
59
+ ```
60
+
61
+ ## ninetrix.yaml
62
+
63
+ ```yaml
64
+ agents:
65
+ orchestrator:
66
+ metadata:
67
+ role: "Research Orchestrator"
68
+ goal: "Coordinate search and synthesis"
69
+ runtime:
70
+ provider: anthropic
71
+ model: claude-sonnet-4-6
72
+ tools:
73
+ - { name: search, source: mcp://duckduckgo }
74
+ collaborators: [researcher, writer]
75
+ governance:
76
+ max_budget_per_run: 1.00
77
+ human_approval: true
78
+ triggers:
79
+ - type: webhook
80
+ endpoint: /run
81
+
82
+ researcher:
83
+ runtime: { model: claude-haiku-4-5-20251001 }
84
+ tools:
85
+ - { name: search, source: mcp://duckduckgo }
86
+ - { name: files, source: mcp://filesystem }
87
+
88
+ writer:
89
+ runtime: { model: claude-sonnet-4-6, temperature: 0.7 }
90
+ tools:
91
+ - { name: files, source: mcp://filesystem }
92
+ ```
93
+
94
+ ## Commands
95
+
96
+ | Command | Description |
97
+ |---|---|
98
+ | `ninetrix init` | Scaffold a new `ninetrix.yaml` |
99
+ | `ninetrix build` | Build container images |
100
+ | `ninetrix run` | Run a single agent interactively |
101
+ | `ninetrix up` | Start all agents on a Docker bridge network |
102
+ | `ninetrix down` | Stop and remove all crew containers |
103
+ | `ninetrix status` | Show running agent containers |
104
+ | `ninetrix logs` | Stream logs from all agents |
105
+ | `ninetrix invoke` | POST a message to a running agent |
106
+ | `ninetrix trace` | Render a multi-agent execution tree |
107
+ | `ninetrix mcp list` | List available MCP tool servers |
108
+
109
+ ## Environment variables
110
+
111
+ | Variable | Description |
112
+ |---|---|
113
+ | `ANTHROPIC_API_KEY` | Anthropic API key |
114
+ | `OPENAI_API_KEY` | OpenAI API key |
115
+ | `DATABASE_URL` | PostgreSQL URL for persistence |
116
+ | `NINETRIX_PROVIDER` | Override model provider at runtime |
117
+ | `NINETRIX_MODEL` | Override model at runtime |
118
+
119
+ ## Requirements
120
+
121
+ - Python 3.10+
122
+ - Docker
123
+
124
+ ## License
125
+
126
+ MIT
@@ -0,0 +1,108 @@
1
+ # ninetrix
2
+
3
+ Build and deploy AI agents as Docker containers. Define your agent in YAML, ship it anywhere Docker runs.
4
+
5
+ ```bash
6
+ pip install ninetrix
7
+ ```
8
+
9
+ ---
10
+
11
+ ## Quickstart
12
+
13
+ ```bash
14
+ # Scaffold a new agent
15
+ ninetrix init --name my-agent --provider anthropic
16
+
17
+ # Build the container image
18
+ ninetrix build --file ninetrix.yaml
19
+
20
+ # Run it interactively
21
+ ninetrix run --file ninetrix.yaml
22
+ ```
23
+
24
+ ## Multi-agent crews
25
+
26
+ ```bash
27
+ # Start all agents on a shared Docker network
28
+ ninetrix up --file ninetrix.yaml
29
+
30
+ # Trigger the orchestrator
31
+ ninetrix invoke --agent orchestrator -m "Research Python history and write a summary"
32
+
33
+ # Stream logs from all agents
34
+ ninetrix logs --file ninetrix.yaml
35
+
36
+ # Visualize the execution trace
37
+ ninetrix trace --thread-id <id>
38
+
39
+ # Tear down
40
+ ninetrix down --file ninetrix.yaml
41
+ ```
42
+
43
+ ## ninetrix.yaml
44
+
45
+ ```yaml
46
+ agents:
47
+ orchestrator:
48
+ metadata:
49
+ role: "Research Orchestrator"
50
+ goal: "Coordinate search and synthesis"
51
+ runtime:
52
+ provider: anthropic
53
+ model: claude-sonnet-4-6
54
+ tools:
55
+ - { name: search, source: mcp://duckduckgo }
56
+ collaborators: [researcher, writer]
57
+ governance:
58
+ max_budget_per_run: 1.00
59
+ human_approval: true
60
+ triggers:
61
+ - type: webhook
62
+ endpoint: /run
63
+
64
+ researcher:
65
+ runtime: { model: claude-haiku-4-5-20251001 }
66
+ tools:
67
+ - { name: search, source: mcp://duckduckgo }
68
+ - { name: files, source: mcp://filesystem }
69
+
70
+ writer:
71
+ runtime: { model: claude-sonnet-4-6, temperature: 0.7 }
72
+ tools:
73
+ - { name: files, source: mcp://filesystem }
74
+ ```
75
+
76
+ ## Commands
77
+
78
+ | Command | Description |
79
+ |---|---|
80
+ | `ninetrix init` | Scaffold a new `ninetrix.yaml` |
81
+ | `ninetrix build` | Build container images |
82
+ | `ninetrix run` | Run a single agent interactively |
83
+ | `ninetrix up` | Start all agents on a Docker bridge network |
84
+ | `ninetrix down` | Stop and remove all crew containers |
85
+ | `ninetrix status` | Show running agent containers |
86
+ | `ninetrix logs` | Stream logs from all agents |
87
+ | `ninetrix invoke` | POST a message to a running agent |
88
+ | `ninetrix trace` | Render a multi-agent execution tree |
89
+ | `ninetrix mcp list` | List available MCP tool servers |
90
+
91
+ ## Environment variables
92
+
93
+ | Variable | Description |
94
+ |---|---|
95
+ | `ANTHROPIC_API_KEY` | Anthropic API key |
96
+ | `OPENAI_API_KEY` | OpenAI API key |
97
+ | `DATABASE_URL` | PostgreSQL URL for persistence |
98
+ | `ninetrix_PROVIDER` | Override model provider at runtime |
99
+ | `ninetrix_MODEL` | Override model at runtime |
100
+
101
+ ## Requirements
102
+
103
+ - Python 3.10+
104
+ - Docker
105
+
106
+ ## License
107
+
108
+ MIT
@@ -0,0 +1 @@
1
# Package version — keep in sync with the `version` field in pyproject.toml.
__version__ = "0.1.0"
@@ -0,0 +1,136 @@
1
+ """Ninetrix CLI entry point."""
2
+
3
+ from __future__ import annotations
4
+
5
+ import os
6
+ import sys
7
+
8
+ import click
9
+ from rich.console import Console
10
+
11
+ from agentfile import __version__
12
+ from agentfile.commands.init import init_cmd
13
+ from agentfile.commands.build import build_cmd
14
+ from agentfile.commands.run import run_cmd
15
+ from agentfile.commands.deploy import deploy_cmd
16
+ from agentfile.commands.mcp import mcp_cmd
17
+ from agentfile.commands.up import up_cmd
18
+ from agentfile.commands.down import down_cmd
19
+ from agentfile.commands.status import status_cmd
20
+ from agentfile.commands.logs import logs_cmd
21
+ from agentfile.commands.invoke import invoke_cmd
22
+ from agentfile.commands.trace import trace_cmd
23
+ from agentfile.commands.restart import restart_cmd
24
+ from agentfile.commands.rollback import rollback_cmd
25
+ from agentfile.commands.doctor import doctor_cmd
26
+ from agentfile.commands.validate import validate_cmd
27
+ from agentfile.commands.auth import auth_cmd
28
+ from agentfile.commands.compose import compose_cmd
29
+ from agentfile.commands.gateway import gateway_cmd
30
+ from agentfile.commands.dev import dev_command
31
+
32
+ console = Console()
33
+
34
+ LOGO = "[bold purple] Ninetrix [/bold purple]"
35
+
36
+
37
@click.group()
@click.version_option(__version__, "--version", "-V")
def cli() -> None:
    """[bold]Ninetrix[/bold] — build and deploy AI agents as containers.

    \b
    Quick start:
      ninetrix init          scaffold agentfile.yaml
      ninetrix build         build Docker image(s)
      ninetrix run           run entry agent locally
      ninetrix deploy        push & deploy to a registry
      ninetrix mcp list      inspect MCP tool integrations

    \b
    Multi-agent warm pool:
      ninetrix up            start all agents on a Docker network
      ninetrix status        show agent container status
      ninetrix invoke        send a message to a running agent
      ninetrix logs          stream agent container logs
      ninetrix trace         visualize a multi-agent run
      ninetrix restart       rebuild and restart one agent
      ninetrix rollback      switch one agent to a previous image tag
      ninetrix down          stop the warm pool

    \b
    Compose deployment:
      ninetrix compose       generate docker-compose.yml for any cloud

    \b
    MCP Gateway:
      ninetrix gateway start    start local gateway + worker stack
      ninetrix gateway status   show connected workers and tools
      ninetrix gateway stop     tear down the gateway stack

    \b
    Local environment:
      ninetrix dev           start local server (API + MCP gateway + dashboard)

    \b
    Utilities:
      ninetrix validate      lint agentfile.yaml without building
      ninetrix doctor        check Docker, API, pool, and env vars
      ninetrix auth          manage API authentication
    """
    # NOTE: the docstring above doubles as the `--help` text rendered by
    # click, so it is user-facing output — do not edit it casually.
    # The group body is intentionally empty; sub-commands are registered
    # at module level after this definition.
    pass
82
+
83
+
84
# Register sub-commands: CLI name → command object, in help-display order.
_SUBCOMMANDS = {
    "init": init_cmd,
    "build": build_cmd,
    "run": run_cmd,
    "deploy": deploy_cmd,
    "mcp": mcp_cmd,
    "up": up_cmd,
    "down": down_cmd,
    "status": status_cmd,
    "logs": logs_cmd,
    "invoke": invoke_cmd,
    "trace": trace_cmd,
    "restart": restart_cmd,
    "rollback": rollback_cmd,
    "doctor": doctor_cmd,
    "validate": validate_cmd,
    "auth": auth_cmd,
    "compose": compose_cmd,
    "gateway": gateway_cmd,
    "dev": dev_command,
}
for _cli_name, _cli_cmd in _SUBCOMMANDS.items():
    cli.add_command(_cli_cmd, name=_cli_name)
104
+
105
+
106
def main() -> None:
    """Console-script entry point with a friendly last-resort error handler.

    Runs the click group with ``standalone_mode=False`` so exceptions reach
    this function instead of click's default handler, then maps them to
    clean messages and exit codes rather than raw tracebacks.  Set
    ``NINETRIX_DEBUG=1`` to re-raise and see the full traceback.
    """
    debug = os.environ.get("NINETRIX_DEBUG") == "1"
    try:
        cli(standalone_mode=False)
    except click.exceptions.Exit as exc:
        sys.exit(exc.exit_code)
    except click.exceptions.Abort:
        console.print("\n[yellow]Aborted.[/yellow]")
        sys.exit(1)
    except click.ClickException as exc:
        # Usage/parameter errors: with standalone_mode=False click no longer
        # renders these itself, so show its formatted message and preserve
        # its exit code instead of falling into the generic handler below.
        exc.show()
        sys.exit(exc.exit_code)
    except SystemExit:
        raise
    except Exception as exc:
        if debug:
            raise
        # Friendly last-resort handler — individual commands should catch their
        # own exceptions and give better messages, but this prevents raw tracebacks.
        try:
            from docker.errors import DockerException
        except ImportError:
            # docker SDK unavailable — don't let the import mask the real error.
            DockerException = None
        if DockerException is not None and isinstance(exc, DockerException):
            from agentfile.core.errors import fmt_docker_error
            msg, hint = fmt_docker_error(exc)
            console.print(f"\n [red]✗[/red] Docker error: {msg}")
            if hint:
                console.print(f" [dim]Hint: {hint}[/dim]")
        else:
            console.print(f"\n [red]✗[/red] {exc}")
            console.print(" [dim]Set NINETRIX_DEBUG=1 for the full traceback.[/dim]")
        sys.exit(1)


if __name__ == "__main__":
    main()
File without changes
@@ -0,0 +1,120 @@
1
+ """ninetrix auth — manage authentication with the Ninetrix API."""
2
+ from __future__ import annotations
3
+
4
+ import json
5
+ import os
6
+
7
+ import click
8
+ import httpx
9
+ from rich.console import Console
10
+
11
+ from agentfile.core.auth import (
12
+ SECRET_FILE,
13
+ TOKEN_FILE,
14
+ auth_headers,
15
+ clear_token,
16
+ save_token,
17
+ )
18
+
19
+ console = Console()
20
+
21
+
22
@click.group("auth")
def auth_cmd() -> None:
    """Manage authentication with the Ninetrix API hub."""
    # Container group only — subcommands attach themselves via decorators.
26
+
27
+
28
@auth_cmd.command("login")
@click.option("--token", "-t", required=True, metavar="TOKEN",
              help="Personal access token from the Ninetrix dashboard (Settings → API Keys)")
@click.option("--api-url", default=None, metavar="URL",
              help="API URL (overrides AGENTFILE_API_URL, default: http://localhost:8000)")
def auth_login(token: str, api_url: str | None) -> None:
    """Save an API token — enables credential injection in ninetrix run/up.

    The token is verified against the live API first.  A 401 aborts with
    exit code 1; any other non-200 status or a network problem only warns
    and saves anyway, so login still works offline.
    """
    url = api_url or os.environ.get("AGENTFILE_API_URL", "http://localhost:8000")
    console.print()

    # Verify the token against the live API before saving
    try:
        resp = httpx.get(
            f"{url}/integrations/credentials",
            headers={"Authorization": f"Bearer {token}"},
            timeout=5,
        )
        if resp.status_code == 401:
            console.print("[red]Token rejected — double-check it's correct.[/red]\n")
            raise SystemExit(1)
        if resp.status_code != 200:
            console.print(
                f" [yellow]Warning:[/yellow] API returned {resp.status_code}. "
                "Saving anyway."
            )
    except httpx.HTTPError:
        # httpx.HTTPError covers connect failures *and* timeouts — previously
        # only ConnectError was handled, so a hung API (the timeout=5 path)
        # surfaced as a raw traceback instead of this warning.
        console.print(
            f" [yellow]Warning:[/yellow] Could not reach [dim]{url}[/dim] — saving token anyway."
        )

    save_token(token)
    console.print(f" [green]✓[/green] Token saved → [dim]{TOKEN_FILE}[/dim]")
    console.print(f" [dim]API:[/dim] {url}\n")
61
+
62
+
63
@auth_cmd.command("logout")
def auth_logout() -> None:
    """Remove the stored API token."""
    console.print()
    if TOKEN_FILE.exists():
        clear_token()
        console.print(" [green]✓[/green] Token removed.\n")
    else:
        console.print(" [dim]No token stored — nothing to remove.[/dim]\n")
72
+
73
+
74
@auth_cmd.command("status")
@click.option("--api-url", default=None, metavar="URL", help="API URL to check")
def auth_status(api_url: str | None) -> None:
    """Show which auth method is active and whether the API is reachable.

    Auth-source precedence: AGENTFILE_API_TOKEN env var, then the saved
    token file, then the per-machine secret file.  The connectivity probe
    is best-effort and never raises.
    """
    url = api_url or os.environ.get("AGENTFILE_API_URL", "http://localhost:8000")
    console.print()
    console.print("[bold]ninetrix auth status[/bold]\n")

    # Determine which source the token comes from
    if os.environ.get("AGENTFILE_API_TOKEN"):
        method = "env var [dim](AGENTFILE_API_TOKEN)[/dim]"
    elif TOKEN_FILE.exists():
        try:
            data = json.loads(TOKEN_FILE.read_text())
            if data.get("token"):
                method = f"token file [dim]({TOKEN_FILE})[/dim]"
            else:
                method = "[dim]token file (empty)[/dim]"
        except Exception:
            # Corrupt / non-JSON token file — report it rather than crash.
            method = "[dim]token file (unreadable)[/dim]"
    elif SECRET_FILE.exists():
        method = f"machine secret [dim]({SECRET_FILE})[/dim]"
    else:
        method = "[yellow]none[/yellow]"

    console.print(f" [dim]Auth method:[/dim] {method}")
    console.print(f" [dim]API URL: [/dim] {url}")

    # Connectivity check
    try:
        resp = httpx.get(
            f"{url}/integrations/credentials",
            headers=auth_headers(url),
            timeout=3,
        )
    except httpx.HTTPError:
        # Covers connect errors *and* timeouts — previously only ConnectError
        # was caught, so a hung API produced a raw traceback here.
        console.print(" [dim]Status: [/dim] [dim]API not reachable[/dim]")
    else:
        if resp.status_code == 200:
            console.print(" [dim]Status: [/dim] [green]✓ connected[/green]")
        elif resp.status_code == 401:
            console.print(
                " [dim]Status: [/dim] [red]✗ auth failed[/red] "
                "[dim]— run [bold]ninetrix auth login --token <token>[/bold][/dim]"
            )
        else:
            console.print(f" [dim]Status: [/dim] [yellow]HTTP {resp.status_code}[/yellow]")
    console.print()
@@ -0,0 +1,172 @@
1
+ """agentfile build — validate + Dockerfile + docker build."""
2
+
3
+ from __future__ import annotations
4
+
5
+ import shutil
6
+ import tempfile
7
+ from concurrent.futures import ThreadPoolExecutor, as_completed
8
+ from pathlib import Path
9
+
10
+ import click
11
+ from jinja2 import Environment, PackageLoader
12
+ from rich.console import Console
13
+
14
+ from agentfile.core.models import AgentDef, AgentFile
15
+ from agentfile.core.docker import build_image
16
+ from agentfile.core.template_context import build_context
17
+
18
+ console = Console()
19
+
20
+
21
def _render_templates(agent_def: AgentDef, af: AgentFile, context_dir: Path) -> None:
    """Render Dockerfile and entrypoint.py into *context_dir* for one agent."""
    jinja_env = Environment(
        loader=PackageLoader("agentfile", "templates"),
        keep_trailing_newline=True,
    )

    # Warnings raised during context assembly are surfaced on the console.
    def _warn(msg: str) -> None:
        console.print(f" [yellow]Warning:[/yellow] {msg}")

    render_ctx = build_context(
        af,
        agent_def,
        is_saas_runner=False,
        has_invoke_server=agent_def.serve,
        _warn=_warn,
    )

    # Render each template with the shared context and write it alongside
    # the build context so `docker build` picks both files up.
    for template_name, output_name in (
        ("Dockerfile.j2", "Dockerfile"),
        ("entrypoint.py.j2", "entrypoint.py"),
    ):
        rendered = jinja_env.get_template(template_name).render(**render_ctx)
        (context_dir / output_name).write_text(rendered)
+
42
+
43
def _build_one(
    agent_name: str, agent_def: AgentDef, af: AgentFile,
    agentfile_path: str, tag: str,
) -> tuple[bool, str, list[str]]:
    """Render templates and build one image in a worker thread.

    Returns ``(success, full_tag, log_lines)``.  Deliberately never writes
    to the console so concurrent invocations from the thread pool can't
    interleave output.
    """
    # Imported locally so the docker SDK is only loaded in worker threads.
    import docker as _docker
    from docker.errors import DockerException as _DE

    collected: list[str] = []
    with tempfile.TemporaryDirectory(prefix=f"agentfile-build-{agent_name}-") as tmp:
        workdir = Path(tmp)
        shutil.copy(agentfile_path, workdir / "agentfile.yaml")
        _render_templates(agent_def, af, workdir)
        full_tag = agent_def.image_name(tag)
        try:
            client = _docker.from_env()
            _img, build_log = client.images.build(
                path=str(workdir), tag=full_tag, rm=True, forcerm=True,
            )
            for chunk in build_log:
                text = chunk.get("stream", "").rstrip()
                if text:
                    collected.append(text)
            return True, full_tag, collected
        except _DE as exc:
            # The failed build's message becomes the single log line shown
            # in the summary printed by build_cmd.
            return False, full_tag, [str(exc)]
73
+
74
+
75
@click.command("build")
@click.option("--file", "-f", "agentfile_path", default="agentfile.yaml",
              show_default=True, help="Path to agentfile.yaml")
@click.option("--tag", "-t", default="latest", show_default=True,
              help="Docker image tag")
@click.option("--push", is_flag=True, default=False,
              help="Push the image(s) after building")
@click.option("--agent", "agent_filter", default=None,
              help="Build only this agent key (multi-agent files)")
@click.option("--environment", "environment", default=None, metavar="NAME",
              help="Apply environment overlay from agentfile.yaml (e.g. dev, prod)")
def build_cmd(agentfile_path: str, tag: str, push: bool, agent_filter: str | None,
              environment: str | None) -> None:
    """Validate agentfile.yaml and build Docker image(s).

    Pipeline: parse → optional environment overlay → validate → build one
    image per selected agent (verbose stream for a single agent, parallel
    worker threads with a spinner for several) → optionally push every
    built reference.  Exits with status 1 on any parse, validation, or
    build failure.
    """
    console.print()
    console.print("[bold purple]ninetrix build[/bold purple]\n")

    console.print(f" Reading [bold]{agentfile_path}[/bold] …")
    try:
        af = AgentFile.from_path(agentfile_path)
    except (FileNotFoundError, ValueError) as exc:
        console.print(f"[red]{exc}[/red]")
        raise SystemExit(1)

    # Apply a named environment overlay (e.g. dev/prod) declared in the file.
    if environment:
        if environment not in af.environments:
            available = ", ".join(af.environments.keys()) or "none defined"
            console.print(f"[red]Environment '{environment}' not found.[/red] Available: {available}")
            raise SystemExit(1)
        af = af.for_env(environment)
        console.print(f" [dim]Environment:[/dim] [bold]{environment}[/bold]")

    errors = af.validate()
    if errors:
        console.print("[red]Validation failed:[/red]")
        for e in errors:
            console.print(f" • {e}")
        raise SystemExit(1)
    console.print(" [green]✓[/green] Agentfile is valid")

    # Narrow the build set to a single agent when --agent was given.
    if agent_filter:
        if agent_filter not in af.agents:
            console.print(f"[red]Agent '{agent_filter}' not found in agentfile.[/red]")
            console.print(f" Available agents: {', '.join(af.agents.keys())}")
            raise SystemExit(1)
        agents_to_build = {agent_filter: af.agents[agent_filter]}
    else:
        agents_to_build = af.agents

    built_refs: list[str] = []  # successfully built image references

    if len(agents_to_build) == 1:
        # Single agent — stream docker output verbosely as before
        agent_name, agent_def = next(iter(agents_to_build.items()))
        with tempfile.TemporaryDirectory(prefix=f"agentfile-build-{agent_name}-") as tmp:
            ctx = Path(tmp)
            shutil.copy(agentfile_path, ctx / "agentfile.yaml")
            _render_templates(agent_def, af, ctx)
            image_ref = build_image(ctx, agent_def.image_name(), tag)
            built_refs.append(image_ref)
    else:
        # Multi-agent — build all images in parallel, show spinner + summary
        names = list(agents_to_build.keys())
        console.print(
            f" [dim]Building {len(names)} image(s) in parallel: {', '.join(names)}[/dim]"
        )
        # agent name → (success, image ref, captured build-log lines)
        results: dict[str, tuple[bool, str, list[str]]] = {}
        with console.status(" Building images…", spinner="dots"):
            with ThreadPoolExecutor(max_workers=len(agents_to_build)) as pool:
                futures = {
                    pool.submit(_build_one, name, adef, af, agentfile_path, tag): name
                    for name, adef in agents_to_build.items()
                }
                for future in as_completed(futures):
                    results[futures[future]] = future.result()

        any_failed = False
        for name in names:  # print in declaration order
            ok, ref, lines = results[name]
            if ok:
                console.print(f" [green]✓[/green] Built [bold]{ref}[/bold]")
                built_refs.append(ref)
            else:
                # On failure _build_one returns the error message as the last line.
                msg = lines[-1] if lines else "unknown error"
                console.print(f" [red]✗[/red] Failed to build [bold]{name}[/bold]: {msg}")
                any_failed = True
        if any_failed:
            raise SystemExit(1)

    if push:
        # Lazy import — the push helper is only needed on --push.
        from agentfile.core.docker import push_image
        for ref in built_refs:
            push_image(ref)

    if len(built_refs) == 1:
        console.print(f"\n Run it with:\n [bold]ninetrix run --image {built_refs[0]}[/bold]\n")
    else:
        console.print(f"\n Start the warm pool with:\n [bold]ninetrix up --file {agentfile_path}[/bold]\n")