code-puppy 0.0.141-py3-none-any.whl → 0.0.143-py3-none-any.whl

This diff compares two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only and reflects only the changes between the two published versions.
--- code_puppy/model_factory.py (0.0.141)
+++ code_puppy/model_factory.py (0.0.143)
@@ -8,6 +8,7 @@ import httpx
 from anthropic import AsyncAnthropic
 from openai import AsyncAzureOpenAI  # For Azure OpenAI client
 from pydantic_ai.models.anthropic import AnthropicModel
+from pydantic_ai.models.fallback import infer_model
 from pydantic_ai.models.gemini import GeminiModel
 from pydantic_ai.models.openai import OpenAIChatModel
 from pydantic_ai.providers.anthropic import AnthropicProvider
@@ -18,6 +19,7 @@ from pydantic_ai.providers.cerebras import CerebrasProvider
 from . import callbacks
 from .config import EXTRA_MODELS_FILE
 from .http_utils import create_async_client
+from .round_robin_model import RoundRobinModel
 
 # Environment variables used in this module:
 # - GEMINI_API_KEY: API key for Google's Gemini models. Required when using Gemini models.
@@ -246,5 +248,22 @@ class ModelFactory:
             model = OpenAIChatModel(model_name=model_config["name"], provider=provider)
             setattr(model, "provider", provider)
             return model
+
+        elif model_type == "round_robin":
+            # Get the list of model names to use in the round-robin
+            model_names = model_config.get("models")
+            if not model_names or not isinstance(model_names, list):
+                raise ValueError(f"Round-robin model '{model_name}' requires a 'models' list in its configuration.")
+
+            # Resolve each model name to an actual model instance
+            models = []
+            for name in model_names:
+                # Recursively get each model using the factory
+                model = ModelFactory.get_model(name, config)
+                models.append(model)
+
+            # Create and return the round-robin model
+            return RoundRobinModel(*models)
+
         else:
             raise ValueError(f"Unsupported model type: {model_type}")
--- /dev/null
+++ code_puppy/round_robin_model.py (new in 0.0.143)
@@ -0,0 +1,102 @@
+from dataclasses import dataclass, field
+from typing import Any, Callable, AsyncIterator, List
+from contextlib import asynccontextmanager, suppress
+from pydantic_ai.models import Model, ModelMessage, ModelSettings, ModelRequestParameters, ModelResponse, StreamedResponse
+from pydantic_ai.models.fallback import KnownModelName, infer_model, merge_model_settings
+from pydantic_ai.result import RunContext
+
+@dataclass(init=False)
+class RoundRobinModel(Model):
+    """A model that cycles through multiple models in a round-robin fashion.
+
+    This model distributes requests across multiple candidate models to help
+    overcome rate limits or distribute load.
+    """
+
+    models: List[Model]
+    _current_index: int = field(default=0, repr=False)
+    _model_name: str = field(repr=False)
+
+    def __init__(
+        self,
+        *models: Model | KnownModelName | str,
+    ):
+        """Initialize a round-robin model instance.
+
+        Args:
+            models: The names or instances of models to cycle through.
+        """
+        super().__init__()
+        if not models:
+            raise ValueError("At least one model must be provided")
+        self.models = [infer_model(m) for m in models]
+        self._current_index = 0
+
+    @property
+    def model_name(self) -> str:
+        """The model name showing this is a round-robin model with its candidates."""
+        return f'round_robin:{",".join(model.model_name for model in self.models)}'
+
+    @property
+    def system(self) -> str:
+        """System prompt from the current model."""
+        return self.models[self._current_index].system
+
+    @property
+    def base_url(self) -> str | None:
+        """Base URL from the current model."""
+        return self.models[self._current_index].base_url
+
+    def _get_next_model(self) -> Model:
+        """Get the next model in the round-robin sequence and update the index."""
+        model = self.models[self._current_index]
+        self._current_index = (self._current_index + 1) % len(self.models)
+        return model
+
+    async def request(
+        self,
+        messages: list[ModelMessage],
+        model_settings: ModelSettings | None,
+        model_request_parameters: ModelRequestParameters,
+    ) -> ModelResponse:
+        """Make a request using the next model in the round-robin sequence."""
+        current_model = self._get_next_model()
+        merged_settings = merge_model_settings(current_model.settings, model_settings)
+        customized_model_request_parameters = current_model.customize_request_parameters(model_request_parameters)
+
+        try:
+            response = await current_model.request(messages, merged_settings, customized_model_request_parameters)
+            self._set_span_attributes(current_model)
+            return response
+        except Exception as exc:
+            # Unlike FallbackModel, we don't try other models here
+            # The round-robin strategy is about distribution, not failover
+            raise exc
+
+    @asynccontextmanager
+    async def request_stream(
+        self,
+        messages: list[ModelMessage],
+        model_settings: ModelSettings | None,
+        model_request_parameters: ModelRequestParameters,
+        run_context: RunContext[Any] | None = None,
+    ) -> AsyncIterator[StreamedResponse]:
+        """Make a streaming request using the next model in the round-robin sequence."""
+        current_model = self._get_next_model()
+        merged_settings = merge_model_settings(current_model.settings, model_settings)
+        customized_model_request_parameters = current_model.customize_request_parameters(model_request_parameters)
+
+        async with current_model.request_stream(
+            messages, merged_settings, customized_model_request_parameters, run_context
+        ) as response:
+            self._set_span_attributes(current_model)
+            yield response
+
+    def _set_span_attributes(self, model: Model):
+        """Set span attributes for observability."""
+        with suppress(Exception):
+            span = get_current_span()
+            if span.is_recording():
+                attributes = getattr(span, 'attributes', {})
+                if attributes.get('gen_ai.request.model') == self.model_name:
+                    span.set_attributes(model.model_attributes(model))
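
Since round_robin_model.py is new in this release, a hedged usage sketch follows. It is not taken from the package's documentation: the model name strings and the prompt are placeholders, and constructing the underlying models requires the matching API keys (e.g. OPENAI_API_KEY, ANTHROPIC_API_KEY) to be present in the environment.

    # Hedged usage sketch: successive model requests rotate through the candidates.
    from pydantic_ai import Agent

    from code_puppy.round_robin_model import RoundRobinModel

    # Placeholder candidates; infer_model() resolves "provider:model" strings.
    # Constructing them needs OPENAI_API_KEY / ANTHROPIC_API_KEY to be set.
    model = RoundRobinModel("openai:gpt-4o", "anthropic:claude-3-5-sonnet-latest")
    agent = Agent(model)

    # result = agent.run_sync("Say hello")
    # 1st model request -> gpt-4o, 2nd -> claude, 3rd -> gpt-4o, ...

Note the design choice visible in request() above: unlike pydantic_ai's FallbackModel, errors are re-raised immediately rather than retried on another candidate, so the rotation is purely about spreading load, not failover.
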
--- code_puppy-0.0.141.dist-info/METADATA
+++ code_puppy-0.0.143.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: code-puppy
-Version: 0.0.141
+Version: 0.0.143
 Summary: Code generation agent
 Project-URL: repository, https://github.com/mpfaffenberger/code_puppy
 Project-URL: HomePage, https://github.com/mpfaffenberger/code_puppy
--- code_puppy-0.0.141.dist-info/RECORD
+++ code_puppy-0.0.143.dist-info/RECORD
@@ -6,9 +6,10 @@ code_puppy/config.py,sha256=9yWKHKjLJ2Ddl4frrBI9VRIwPvoWpIx1fAd1YpAvOSQ,15330
 code_puppy/http_utils.py,sha256=BAvt4hed7fVMXglA7eS9gOb08h2YTuOyai6VmQq09fg,3432
 code_puppy/main.py,sha256=Vv5HSJnkgZhCvvOoXrJ2zqM5P-i47-RcYAU00Z1Pfx0,21733
 code_puppy/message_history_processor.py,sha256=O2rKp7W6YeIg93W8b0XySTUEQgIZm0f_06--_kzHugM,16145
-code_puppy/model_factory.py,sha256=NoG9wDTosaaDrFIGtq3oq8gDe0J_7N6CUKuesXz87qM,10878
+code_puppy/model_factory.py,sha256=kTVaHNm6S1cLw6vHE6kH0WS6JZLRoZ8qFGKCp_fdDM4,11756
 code_puppy/models.json,sha256=dAfpMMI2EEeOMv0ynHSmMuJAYDLcZrs5gCLX3voC4-A,3252
 code_puppy/reopenable_async_client.py,sha256=4UJRaMp5np8cbef9F0zKQ7TPKOfyf5U-Kv-0zYUWDho,8274
+code_puppy/round_robin_model.py,sha256=3-WmVDbxrPATHFbmYp_sskn5669jYDtDq9P6lrWrl0o,4300
 code_puppy/state_management.py,sha256=o4mNBCPblRyVrNBH-992-1YqffgH6AKHU7iZRqgP1LI,5925
 code_puppy/status_display.py,sha256=F6eEAkGePDp4StM2BWj-uLLQTDGtJrf0IufzCeP1rRg,8336
 code_puppy/summarization_agent.py,sha256=-e6yUGZ22ahSaF0y7QhgVcQBfx5ktNUkPxBIWQfPaA4,3275
@@ -125,9 +126,9 @@ code_puppy/tui/tests/test_sidebar_history_navigation.py,sha256=JGiyua8A2B8dLfwiE
 code_puppy/tui/tests/test_status_bar.py,sha256=nYT_FZGdmqnnbn6o0ZuOkLtNUtJzLSmtX8P72liQ5Vo,1797
 code_puppy/tui/tests/test_timestamped_history.py,sha256=nVXt9hExZZ_8MFP-AZj4L4bB_1Eo_mc-ZhVICzTuw3I,1799
 code_puppy/tui/tests/test_tools.py,sha256=kgzzAkK4r0DPzQwHHD4cePpVNgrHor6cFr05Pg6DBWg,2687
-code_puppy-0.0.141.data/data/code_puppy/models.json,sha256=dAfpMMI2EEeOMv0ynHSmMuJAYDLcZrs5gCLX3voC4-A,3252
-code_puppy-0.0.141.dist-info/METADATA,sha256=09qtNO_oD5YGszPMVCR3mZ2YsvcqrXn-PU31tLRE8sE,19873
-code_puppy-0.0.141.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-code_puppy-0.0.141.dist-info/entry_points.txt,sha256=d8YkBvIUxF-dHNJAj-x4fPEqizbY5d_TwvYpc01U5kw,58
-code_puppy-0.0.141.dist-info/licenses/LICENSE,sha256=31u8x0SPgdOq3izJX41kgFazWsM43zPEF9eskzqbJMY,1075
-code_puppy-0.0.141.dist-info/RECORD,,
+code_puppy-0.0.143.data/data/code_puppy/models.json,sha256=dAfpMMI2EEeOMv0ynHSmMuJAYDLcZrs5gCLX3voC4-A,3252
+code_puppy-0.0.143.dist-info/METADATA,sha256=XbQIbiQFjwgUAnpB0_-ObMgoro79rBMivTNi_5Zzebk,19873
+code_puppy-0.0.143.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+code_puppy-0.0.143.dist-info/entry_points.txt,sha256=d8YkBvIUxF-dHNJAj-x4fPEqizbY5d_TwvYpc01U5kw,58
+code_puppy-0.0.143.dist-info/licenses/LICENSE,sha256=31u8x0SPgdOq3izJX41kgFazWsM43zPEF9eskzqbJMY,1075
+code_puppy-0.0.143.dist-info/RECORD,,