pygent 0.1.2__tar.gz → 0.1.4__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: pygent
- Version: 0.1.2
+ Version: 0.1.4
  Summary: Pygent is a minimalist coding assistant that runs commands in a Docker container when available and falls back to local execution. See https://marianochaves.github.io/pygent for documentation and https://github.com/marianochaves/pygent for the source code.
  Author-email: Mariano Chaves <mchaves.software@gmail.com>
  Project-URL: Documentation, https://marianochaves.github.io/pygent
@@ -16,4 +16,6 @@ Provides-Extra: docs
  Requires-Dist: mkdocs; extra == "docs"
  Provides-Extra: docker
  Requires-Dist: docker>=7.0.0; extra == "docker"
+ Provides-Extra: ui
+ Requires-Dist: gradio; extra == "ui"
  Dynamic: license-file
@@ -8,6 +8,7 @@ Pygent is a coding assistant that executes each request inside an isolated Docke
  * Integrates with OpenAI-compatible models to orchestrate each step.
  * Persists the conversation history during the session.
  * Provides a small Python API for use in other projects.
+ * Optional web interface via `pygent-ui`.

  ## Installation

@@ -26,7 +27,10 @@ To run commands in Docker containers also install `pygent[docker]`.
  Behaviour can be adjusted via environment variables:

  * `OPENAI_API_KEY` &ndash; key used to access the OpenAI API.
- * `PYGENT_MODEL` &ndash; model name used for requests (default `gpt-4o-mini-preview`).
+ Set this to your API key or a key from any compatible provider.
+ * `OPENAI_BASE_URL` &ndash; base URL for OpenAI-compatible APIs
+ (defaults to ``https://api.openai.com/v1``).
+ * `PYGENT_MODEL` &ndash; model name used for requests (default `gpt-4.1-mini`).
  * `PYGENT_IMAGE` &ndash; Docker image to create the container (default `python:3.12-slim`).
  * `PYGENT_USE_DOCKER` &ndash; set to `0` to disable Docker and run locally.

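The new `OPENAI_BASE_URL` variable and the `gpt-4.1-mini` default are consumed through the environment, and the agent module changes further down show that `PYGENT_MODEL` is read once at import time into `DEFAULT_MODEL`. A minimal sketch of setting these variables from Python before the first import (the key and endpoint values are placeholders only):

```python
import os

# Placeholders: substitute a real key and, for non-OpenAI providers,
# that provider's OpenAI-compatible endpoint.
os.environ["OPENAI_API_KEY"] = "sk-..."
os.environ["OPENAI_BASE_URL"] = "https://api.openai.com/v1"
os.environ["PYGENT_MODEL"] = "gpt-4.1-mini"   # resolved once when pygent is imported
os.environ["PYGENT_USE_DOCKER"] = "0"         # force local execution

from pygent import Agent  # import after the environment is set

ag = Agent()
ag.step("echo 'Hello World'")
ag.runtime.cleanup()
```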
@@ -42,7 +46,10 @@ Use `--docker` to run commands inside a container (requires
  `pygent[docker]`). Use `--no-docker` or set `PYGENT_USE_DOCKER=0`
  to force local execution.

- Type messages normally; use `/exit` to end the session. Each command is executed in the container and the result shown in the terminal.
+ Type messages normally; use `/exit` to end the session. Each command is executed
+ in the container and the result shown in the terminal.
+ For a minimal web interface run `pygent-ui` instead (requires `pygent[ui]`).
+

  ## API usage

@@ -57,7 +64,26 @@ ag.step("echo 'Hello World'")
  ag.runtime.cleanup()
  ```

- See the `examples/` folder for more complete scripts.
+ See the `examples/` folder for more complete scripts. Models can be swapped by
+ passing an object implementing the ``Model`` interface when creating the
+ ``Agent``. The default uses an OpenAI-compatible API, but custom models are
+ easy to plug in.
+
+ ### Using OpenAI and other providers
+
+ Set your OpenAI key:
+
+ ```bash
+ export OPENAI_API_KEY="sk-..."
+ ```
+
+ To use a different provider, set `OPENAI_BASE_URL` to the provider
+ endpoint and keep `OPENAI_API_KEY` pointing to the correct key:
+
+ ```bash
+ export OPENAI_BASE_URL="https://openrouter.ai/api/v1"
+ export OPENAI_API_KEY="your-provider-key"
+ ```

  ## Development

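The model swap described in this README change is exactly what the new `pygent/models.py` and `tests/test_custom_model.py` further down exercise. A standalone sketch of the same idea (the `CannedModel` name is illustrative; it returns a fixed reply instead of calling an API):

```python
from pygent import Agent
from pygent.openai_compat import Message
from pygent.runtime import Runtime


class CannedModel:
    """Illustrative stand-in: any object with this chat() signature works."""

    def chat(self, messages, model, tools):
        return Message(role="assistant", content="ok")


ag = Agent(runtime=Runtime(use_docker=False), model=CannedModel())
ag.step("hi")
print(ag.history[-1].content)  # -> "ok"
ag.runtime.cleanup()
```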
@@ -7,5 +7,6 @@ except _metadata.PackageNotFoundError: # pragma: no cover - fallback for tests
  __version__ = "0.0.0"

  from .agent import Agent, run_interactive # noqa: E402,F401, must come after __version__
+ from .models import Model, OpenAIModel # noqa: E402,F401

- __all__ = ["Agent", "run_interactive"]
+ __all__ = ["Agent", "run_interactive", "Model", "OpenAIModel"]
@@ -8,17 +8,14 @@ import time
  from dataclasses import dataclass, field
  from typing import Any, Dict, List

- try:
-     import openai # type: ignore
- except ModuleNotFoundError: # pragma: no cover - fallback to bundled client
-     from . import openai_compat as openai
  from rich.console import Console
  from rich.panel import Panel

  from .runtime import Runtime
  from .tools import TOOL_SCHEMAS, execute_tool
+ from .models import Model, OpenAIModel

- MODEL = os.getenv("PYGENT_MODEL", "gpt-4o-mini-preview")
+ DEFAULT_MODEL = os.getenv("PYGENT_MODEL", "gpt-4.1-mini")
  SYSTEM_MSG = (
      "You are Pygent, a sandboxed coding assistant.\n"
      "Respond with JSON when you need to use a tool."
@@ -27,26 +24,20 @@ SYSTEM_MSG = (
  console = Console()


- def _chat(messages: List[Dict[str, str]]) -> str:
-     resp = openai.chat.completions.create(
-         model=MODEL,
-         messages=messages,
-         tools=TOOL_SCHEMAS,
-         tool_choice="auto",
-     )
-     return resp.choices[0].message


  @dataclass
  class Agent:
      runtime: Runtime = field(default_factory=Runtime)
+     model: Model = field(default_factory=OpenAIModel)
+     model_name: str = DEFAULT_MODEL
      history: List[Dict[str, Any]] = field(default_factory=lambda: [
          {"role": "system", "content": SYSTEM_MSG}
      ])

      def step(self, user_msg: str) -> None:
          self.history.append({"role": "user", "content": user_msg})
-         assistant_msg = _chat(self.history)
+         assistant_msg = self.model.chat(self.history, self.model_name, TOOL_SCHEMAS)
          self.history.append(assistant_msg)

          if assistant_msg.tool_calls:
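With `model` and `model_name` now plain dataclass fields, both can be overridden per `Agent` instance. A short sketch (the `"gpt-4o"` value is only an example of a model id the configured endpoint might accept):

```python
from pygent import Agent
from pygent.runtime import Runtime

# Override the model name for this agent only; the default OpenAIModel still
# sends the full history plus TOOL_SCHEMAS on every step.
ag = Agent(runtime=Runtime(use_docker=False), model_name="gpt-4o")
ag.step("echo 'Hello World'")
ag.runtime.cleanup()
```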
@@ -0,0 +1,33 @@
+ from __future__ import annotations
+
+ """Model interface and default implementation for OpenAI-compatible APIs."""
+
+ from typing import Any, Dict, List, Protocol
+
+ try:
+     import openai # type: ignore
+ except ModuleNotFoundError: # pragma: no cover - fallback to bundled client
+     from . import openai_compat as openai
+
+ from .openai_compat import Message
+
+
+ class Model(Protocol):
+     """Protocol for chat models used by :class:`~pygent.agent.Agent`."""
+
+     def chat(self, messages: List[Dict[str, Any]], model: str, tools: Any) -> Message:
+         """Return the assistant message for the given prompt."""
+         ...
+
+
+ class OpenAIModel:
+     """Default model using the OpenAI-compatible API."""
+
+     def chat(self, messages: List[Dict[str, Any]], model: str, tools: Any) -> Message:
+         resp = openai.chat.completions.create(
+             model=model,
+             messages=messages,
+             tools=tools,
+             tool_choice="auto",
+         )
+         return resp.choices[0].message
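Because `Model` is a `typing.Protocol`, any object with a matching `chat` method satisfies it structurally, with no subclassing required. A hedged sketch of a wrapper that delegates to the default `OpenAIModel` while recording per-call latency (`TimedModel` and its timing logic are illustrative, not part of the package):

```python
from __future__ import annotations

import time
from typing import Any, Dict, List

from pygent.models import Model, OpenAIModel
from pygent.openai_compat import Message


class TimedModel:
    """Illustrative wrapper: forwards to another Model and records latency."""

    def __init__(self, inner: Model | None = None) -> None:
        self.inner: Model = inner if inner is not None else OpenAIModel()
        self.latencies: list[float] = []

    def chat(self, messages: List[Dict[str, Any]], model: str, tools: Any) -> Message:
        start = time.perf_counter()
        try:
            return self.inner.chat(messages, model, tools)
        finally:
            self.latencies.append(time.perf_counter() - start)


# Usage, assuming an API key/endpoint is configured as in the README:
# ag = Agent(model=TimedModel())
```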
@@ -0,0 +1,36 @@
+ from .agent import Agent
+ from .runtime import Runtime
+ from .tools import execute_tool, TOOL_SCHEMAS
+
+
+ def run_gui(use_docker: bool | None = None) -> None:
+     """Launch a simple Gradio chat interface."""
+     try:
+         import gradio as gr
+     except ModuleNotFoundError as exc: # pragma: no cover - optional
+         raise SystemExit(
+             "Gradio is required for the GUI. Install with 'pip install pygent[ui]'"
+         ) from exc
+
+     agent = Agent(runtime=Runtime(use_docker=use_docker))
+
+     def _respond(message: str, history: list[tuple[str, str]] | None) -> str:
+         agent.history.append({"role": "user", "content": message})
+         assistant_msg = agent.model.chat(agent.history, agent.model_name, TOOL_SCHEMAS)
+         agent.history.append(assistant_msg)
+         reply = assistant_msg.content or ""
+         if assistant_msg.tool_calls:
+             for call in assistant_msg.tool_calls:
+                 output = execute_tool(call, agent.runtime)
+                 agent.history.append({"role": "tool", "content": output, "tool_call_id": call.id})
+                 reply += f"\n\n[tool:{call.function.name}]\n{output}"
+         return reply
+
+     try:
+         gr.ChatInterface(_respond, title="Pygent").launch()
+     finally:
+         agent.runtime.cleanup()
+
+
+ def main() -> None: # pragma: no cover
+     run_gui()
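Besides the `pygent-ui` console script registered further down, the GUI can be started from Python; a minimal sketch, assuming `pygent[ui]` is installed and an API key is configured:

```python
from pygent.ui import run_gui

# Equivalent to the `pygent-ui` entry point; use_docker=False forces local
# execution, matching PYGENT_USE_DOCKER=0.
run_gui(use_docker=False)
```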
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: pygent
- Version: 0.1.2
+ Version: 0.1.4
  Summary: Pygent is a minimalist coding assistant that runs commands in a Docker container when available and falls back to local execution. See https://marianochaves.github.io/pygent for documentation and https://github.com/marianochaves/pygent for the source code.
  Author-email: Mariano Chaves <mchaves.software@gmail.com>
  Project-URL: Documentation, https://marianochaves.github.io/pygent
@@ -16,4 +16,6 @@ Provides-Extra: docs
  Requires-Dist: mkdocs; extra == "docs"
  Provides-Extra: docker
  Requires-Dist: docker>=7.0.0; extra == "docker"
+ Provides-Extra: ui
+ Requires-Dist: gradio; extra == "ui"
  Dynamic: license-file
@@ -4,15 +4,18 @@ pyproject.toml
  pygent/__init__.py
  pygent/agent.py
  pygent/cli.py
+ pygent/models.py
  pygent/openai_compat.py
  pygent/py.typed
  pygent/runtime.py
  pygent/tools.py
+ pygent/ui.py
  pygent.egg-info/PKG-INFO
  pygent.egg-info/SOURCES.txt
  pygent.egg-info/dependency_links.txt
  pygent.egg-info/entry_points.txt
  pygent.egg-info/requires.txt
  pygent.egg-info/top_level.txt
+ tests/test_custom_model.py
  tests/test_tools.py
  tests/test_version.py
@@ -1,2 +1,3 @@
  [console_scripts]
  pygent = pygent.cli:main
+ pygent-ui = pygent.ui:main
@@ -11,3 +11,6 @@ openai>=1.0.0

  [test]
  pytest
+
+ [ui]
+ gradio
@@ -1,6 +1,6 @@
  [project]
  name = "pygent"
- version = "0.1.2"
+ version = "0.1.4"
  description = "Pygent is a minimalist coding assistant that runs commands in a Docker container when available and falls back to local execution. See https://marianochaves.github.io/pygent for documentation and https://github.com/marianochaves/pygent for the source code."
  authors = [ { name = "Mariano Chaves", email = "mchaves.software@gmail.com" } ]
  requires-python = ">=3.9"
@@ -13,6 +13,7 @@ llm = ["openai>=1.0.0"] # OpenAI-compatible library (optional)
  test = ["pytest"]
  docs = ["mkdocs"]
  docker = ["docker>=7.0.0"]
+ ui = ["gradio"]

  [project.urls]
  Documentation = "https://marianochaves.github.io/pygent"
@@ -20,6 +21,7 @@ Repository = "https://github.com/marianochaves/pygent"

  [project.scripts]
  pygent = "pygent.cli:main"
+ pygent-ui = "pygent.ui:main"


  [tool.setuptools.package-data]
@@ -0,0 +1,28 @@
+ import os
+ import sys
+ import types
+
+ sys.modules.setdefault('openai', types.ModuleType('openai'))
+ sys.modules.setdefault('docker', types.ModuleType('docker'))
+ rich_mod = types.ModuleType('rich')
+ console_mod = types.ModuleType('console')
+ console_mod.Console = lambda *a, **k: type('C', (), {'print': lambda *a, **k: None})()
+ panel_mod = types.ModuleType('panel')
+ panel_mod.Panel = lambda *a, **k: None
+ sys.modules.setdefault('rich', rich_mod)
+ sys.modules.setdefault('rich.console', console_mod)
+ sys.modules.setdefault('rich.panel', panel_mod)
+
+ sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
+
+ from pygent import Agent, openai_compat
+
+ class DummyModel:
+     def chat(self, messages, model, tools):
+         return openai_compat.Message(role='assistant', content='ok')
+
+
+ def test_custom_model():
+     ag = Agent(model=DummyModel())
+     ag.step('hi')
+     assert ag.history[-1].content == 'ok'
9 files without changes