pygent 0.1.3__tar.gz → 0.1.5__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: pygent
- Version: 0.1.3
+ Version: 0.1.5
  Summary: Pygent is a minimalist coding assistant that runs commands in a Docker container when available and falls back to local execution. See https://marianochaves.github.io/pygent for documentation and https://github.com/marianochaves/pygent for the source code.
  Author-email: Mariano Chaves <mchaves.software@gmail.com>
  Project-URL: Documentation, https://marianochaves.github.io/pygent
@@ -64,7 +64,10 @@ ag.step("echo 'Hello World'")
  ag.runtime.cleanup()
  ```

- See the `examples/` folder for more complete scripts.
+ See the `examples/` folder for more complete scripts. Models can be swapped by
+ passing an object implementing the ``Model`` interface when creating the
+ ``Agent``. The default uses an OpenAI-compatible API, but custom models are
+ easy to plug in.

  ### Using OpenAI and other providers

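The README text added above says models can be swapped by passing any object that implements the ``Model`` interface when creating the ``Agent``. As a minimal sketch of what that looks like in 0.1.5 (the ``EchoModel`` class is illustrative only, not part of the package; a working ``Runtime``, either Docker or the local fallback, is still assumed):

    from pygent import Agent
    from pygent.openai_compat import Message

    class EchoModel:
        """Toy model: echoes the last user message instead of calling an API."""

        def chat(self, messages, model, tools):
            # Agent.step() expects an assistant Message object back from chat().
            return Message(role="assistant", content=messages[-1]["content"])

    ag = Agent(model=EchoModel())   # swap in the custom model
    ag.step("say hi")               # handled locally, no network call
    print(ag.history[-1].content)   # -> "say hi"
    ag.runtime.cleanup()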
@@ -7,5 +7,6 @@ except _metadata.PackageNotFoundError: # pragma: no cover - fallback for tests
  __version__ = "0.0.0"

  from .agent import Agent, run_interactive # noqa: E402,F401, must come after __version__
+ from .models import Model, OpenAIModel # noqa: E402,F401

- __all__ = ["Agent", "run_interactive"]
+ __all__ = ["Agent", "run_interactive", "Model", "OpenAIModel"]
@@ -0,0 +1,4 @@
+ from .cli import main
+
+ if __name__ == "__main__":
+     main()
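The new ``pygent/__main__.py`` above just forwards to ``pygent.cli.main``, so from this release the assistant can also be started with ``python -m pygent``, presumably reaching the same entry point as the installed console script.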
@@ -8,17 +8,14 @@ import time
  from dataclasses import dataclass, field
  from typing import Any, Dict, List

- try:
-     import openai # type: ignore
- except ModuleNotFoundError: # pragma: no cover - fallback to bundled client
-     from . import openai_compat as openai
  from rich.console import Console
  from rich.panel import Panel

  from .runtime import Runtime
  from .tools import TOOL_SCHEMAS, execute_tool
+ from .models import Model, OpenAIModel

- MODEL = os.getenv("PYGENT_MODEL", "gpt-4.1-mini")
+ DEFAULT_MODEL = os.getenv("PYGENT_MODEL", "gpt-4.1-mini")
  SYSTEM_MSG = (
      "You are Pygent, a sandboxed coding assistant.\n"
      "Respond with JSON when you need to use a tool."
@@ -27,26 +24,20 @@ SYSTEM_MSG = (
  console = Console()


- def _chat(messages: List[Dict[str, str]]) -> str:
-     resp = openai.chat.completions.create(
-         model=MODEL,
-         messages=messages,
-         tools=TOOL_SCHEMAS,
-         tool_choice="auto",
-     )
-     return resp.choices[0].message


  @dataclass
  class Agent:
      runtime: Runtime = field(default_factory=Runtime)
+     model: Model = field(default_factory=OpenAIModel)
+     model_name: str = DEFAULT_MODEL
      history: List[Dict[str, Any]] = field(default_factory=lambda: [
          {"role": "system", "content": SYSTEM_MSG}
      ])

      def step(self, user_msg: str) -> None:
          self.history.append({"role": "user", "content": user_msg})
-         assistant_msg = _chat(self.history)
+         assistant_msg = self.model.chat(self.history, self.model_name, TOOL_SCHEMAS)
          self.history.append(assistant_msg)

          if assistant_msg.tool_calls:
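With the two hunks above, ``Agent`` no longer calls a module-level ``_chat`` helper: every request goes through the ``model`` field, and the model name is a per-instance ``model_name`` seeded from the ``PYGENT_MODEL`` environment variable. A short sketch of the resulting construction (the values shown are just the defaults from this diff, and a configured OpenAI-compatible API key is assumed):

    from pygent import Agent, OpenAIModel

    # Equivalent to plain Agent(); written out to make the new fields visible.
    ag = Agent(model=OpenAIModel(), model_name="gpt-4.1-mini")
    ag.step("echo 'Hello World'")  # OpenAIModel.chat() receives the history, model name and TOOL_SCHEMAS
    ag.runtime.cleanup()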
@@ -0,0 +1,33 @@
+ from __future__ import annotations
+
+ """Model interface and default implementation for OpenAI-compatible APIs."""
+
+ from typing import Any, Dict, List, Protocol
+
+ try:
+     import openai # type: ignore
+ except ModuleNotFoundError: # pragma: no cover - fallback to bundled client
+     from . import openai_compat as openai
+
+ from .openai_compat import Message
+
+
+ class Model(Protocol):
+     """Protocol for chat models used by :class:`~pygent.agent.Agent`."""
+
+     def chat(self, messages: List[Dict[str, Any]], model: str, tools: Any) -> Message:
+         """Return the assistant message for the given prompt."""
+         ...
+
+
+ class OpenAIModel:
+     """Default model using the OpenAI-compatible API."""
+
+     def chat(self, messages: List[Dict[str, Any]], model: str, tools: Any) -> Message:
+         resp = openai.chat.completions.create(
+             model=model,
+             messages=messages,
+             tools=tools,
+             tool_choice="auto",
+         )
+         return resp.choices[0].message
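Because ``Model`` is a ``typing.Protocol``, any object with a structurally matching ``chat`` method satisfies it; no subclassing or registration is required. As a hypothetical sketch, a wrapper can add behaviour around the default ``OpenAIModel`` while still type-checking as a ``Model``:

    from typing import Any, Dict, List

    from pygent import Agent, OpenAIModel
    from pygent.openai_compat import Message

    class LoggingModel:
        """Hypothetical wrapper: records each prompt, then delegates to OpenAIModel."""

        def __init__(self) -> None:
            self.inner = OpenAIModel()
            self.prompts: List[List[Dict[str, Any]]] = []

        def chat(self, messages: List[Dict[str, Any]], model: str, tools: Any) -> Message:
            self.prompts.append(list(messages))  # keep a copy of every request
            return self.inner.chat(messages, model, tools)

    ag = Agent(model=LoggingModel())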
@@ -1,6 +1,6 @@
- from .agent import Agent, _chat
+ from .agent import Agent
  from .runtime import Runtime
- from .tools import execute_tool
+ from .tools import execute_tool, TOOL_SCHEMAS


  def run_gui(use_docker: bool | None = None) -> None:
@@ -16,7 +16,7 @@ def run_gui(use_docker: bool | None = None) -> None:

      def _respond(message: str, history: list[tuple[str, str]] | None) -> str:
          agent.history.append({"role": "user", "content": message})
-         assistant_msg = _chat(agent.history)
+         assistant_msg = agent.model.chat(agent.history, agent.model_name, TOOL_SCHEMAS)
          agent.history.append(assistant_msg)
          reply = assistant_msg.content or ""
          if assistant_msg.tool_calls:
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: pygent
- Version: 0.1.3
+ Version: 0.1.5
  Summary: Pygent is a minimalist coding assistant that runs commands in a Docker container when available and falls back to local execution. See https://marianochaves.github.io/pygent for documentation and https://github.com/marianochaves/pygent for the source code.
  Author-email: Mariano Chaves <mchaves.software@gmail.com>
  Project-URL: Documentation, https://marianochaves.github.io/pygent
@@ -2,8 +2,10 @@ LICENSE
  README.md
  pyproject.toml
  pygent/__init__.py
+ pygent/__main__.py
  pygent/agent.py
  pygent/cli.py
+ pygent/models.py
  pygent/openai_compat.py
  pygent/py.typed
  pygent/runtime.py
@@ -15,5 +17,6 @@ pygent.egg-info/dependency_links.txt
  pygent.egg-info/entry_points.txt
  pygent.egg-info/requires.txt
  pygent.egg-info/top_level.txt
+ tests/test_custom_model.py
  tests/test_tools.py
  tests/test_version.py
@@ -1,6 +1,6 @@
  [project]
  name = "pygent"
- version = "0.1.3"
+ version = "0.1.5"
  description = "Pygent is a minimalist coding assistant that runs commands in a Docker container when available and falls back to local execution. See https://marianochaves.github.io/pygent for documentation and https://github.com/marianochaves/pygent for the source code."
  authors = [ { name = "Mariano Chaves", email = "mchaves.software@gmail.com" } ]
  requires-python = ">=3.9"
@@ -0,0 +1,28 @@
+ import os
+ import sys
+ import types
+
+ sys.modules.setdefault('openai', types.ModuleType('openai'))
+ sys.modules.setdefault('docker', types.ModuleType('docker'))
+ rich_mod = types.ModuleType('rich')
+ console_mod = types.ModuleType('console')
+ console_mod.Console = lambda *a, **k: type('C', (), {'print': lambda *a, **k: None})()
+ panel_mod = types.ModuleType('panel')
+ panel_mod.Panel = lambda *a, **k: None
+ sys.modules.setdefault('rich', rich_mod)
+ sys.modules.setdefault('rich.console', console_mod)
+ sys.modules.setdefault('rich.panel', panel_mod)
+
+ sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
+
+ from pygent import Agent, openai_compat
+
+ class DummyModel:
+     def chat(self, messages, model, tools):
+         return openai_compat.Message(role='assistant', content='ok')
+
+
+ def test_custom_model():
+     ag = Agent(model=DummyModel())
+     ag.step('hi')
+     assert ag.history[-1].content == 'ok'
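The test above stubs ``openai`` and ``docker`` with empty placeholder modules and installs minimal no-op stand-ins for ``rich`` before importing ``pygent``, so it exercises the custom-model path without the real dependencies or any network access; the final assertion relies on ``Agent.step`` appending the ``Message`` returned by ``DummyModel.chat`` to ``history``.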
9 files without changes