code-puppy 0.0.165__tar.gz → 0.0.167__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {code_puppy-0.0.165 → code_puppy-0.0.167}/PKG-INFO +2 -1
- code_puppy-0.0.167/code_puppy/http_utils.py +225 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/mcp/managed_server.py +2 -4
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/mcp/server_registry_catalog.py +5 -6
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/message_history_processor.py +88 -123
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/tools/command_runner.py +45 -10
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/tools/file_operations.py +42 -7
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/tui/components/custom_widgets.py +7 -2
- {code_puppy-0.0.165 → code_puppy-0.0.167}/pyproject.toml +2 -1
- code_puppy-0.0.165/code_puppy/http_utils.py +0 -122
- {code_puppy-0.0.165 → code_puppy-0.0.167}/.gitignore +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/LICENSE +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/README.md +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/__init__.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/__main__.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/agent.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/agents/__init__.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/agents/agent_code_puppy.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/agents/agent_creator_agent.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/agents/agent_manager.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/agents/agent_orchestrator.json +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/agents/base_agent.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/agents/json_agent.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/agents/runtime_manager.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/callbacks.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/command_line/__init__.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/command_line/command_handler.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/command_line/file_path_completion.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/command_line/load_context_completion.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/command_line/mcp/__init__.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/command_line/mcp/add_command.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/command_line/mcp/base.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/command_line/mcp/handler.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/command_line/mcp/help_command.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/command_line/mcp/install_command.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/command_line/mcp/list_command.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/command_line/mcp/logs_command.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/command_line/mcp/remove_command.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/command_line/mcp/restart_command.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/command_line/mcp/search_command.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/command_line/mcp/start_all_command.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/command_line/mcp/start_command.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/command_line/mcp/status_command.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/command_line/mcp/stop_all_command.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/command_line/mcp/stop_command.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/command_line/mcp/test_command.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/command_line/mcp/utils.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/command_line/mcp/wizard_utils.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/command_line/meta_command_handler.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/command_line/model_picker_completion.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/command_line/motd.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/command_line/prompt_toolkit_completion.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/command_line/utils.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/config.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/main.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/mcp/__init__.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/mcp/async_lifecycle.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/mcp/blocking_startup.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/mcp/captured_stdio_server.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/mcp/circuit_breaker.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/mcp/config_wizard.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/mcp/dashboard.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/mcp/error_isolation.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/mcp/examples/retry_example.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/mcp/health_monitor.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/mcp/manager.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/mcp/registry.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/mcp/retry_manager.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/mcp/status_tracker.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/mcp/system_tools.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/messaging/__init__.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/messaging/message_queue.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/messaging/queue_console.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/messaging/renderers.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/messaging/spinner/__init__.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/messaging/spinner/console_spinner.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/messaging/spinner/spinner_base.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/messaging/spinner/textual_spinner.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/model_factory.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/models.json +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/plugins/__init__.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/reopenable_async_client.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/round_robin_model.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/state_management.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/status_display.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/summarization_agent.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/tools/__init__.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/tools/agent_tools.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/tools/common.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/tools/file_modifications.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/tools/tools_content.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/tui/__init__.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/tui/app.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/tui/components/__init__.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/tui/components/chat_view.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/tui/components/command_history_modal.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/tui/components/copy_button.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/tui/components/human_input_modal.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/tui/components/input_area.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/tui/components/sidebar.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/tui/components/status_bar.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/tui/messages.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/tui/models/__init__.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/tui/models/chat_message.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/tui/models/command_history.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/tui/models/enums.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/tui/screens/__init__.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/tui/screens/help.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/tui/screens/mcp_install_wizard.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/tui/screens/settings.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/tui/screens/tools.py +0 -0
- {code_puppy-0.0.165 → code_puppy-0.0.167}/code_puppy/version_checker.py +0 -0
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.4
|
|
2
2
|
Name: code-puppy
|
|
3
|
-
Version: 0.0.165
|
|
3
|
+
Version: 0.0.167
|
|
4
4
|
Summary: Code generation agent
|
|
5
5
|
Project-URL: repository, https://github.com/mpfaffenberger/code_puppy
|
|
6
6
|
Project-URL: HomePage, https://github.com/mpfaffenberger/code_puppy
|
|
@@ -33,6 +33,7 @@ Requires-Dist: rapidfuzz>=3.13.0
|
|
|
33
33
|
Requires-Dist: rich>=13.4.2
|
|
34
34
|
Requires-Dist: ripgrep>=14.1.0
|
|
35
35
|
Requires-Dist: ruff>=0.11.11
|
|
36
|
+
Requires-Dist: tenacity>=8.2.0
|
|
36
37
|
Requires-Dist: termcolor>=3.1.0
|
|
37
38
|
Requires-Dist: textual-dev>=1.7.0
|
|
38
39
|
Requires-Dist: textual>=5.0.0
|
|
@@ -0,0 +1,225 @@
|
|
|
1
|
+
"""
|
|
2
|
+
HTTP utilities module for code-puppy.
|
|
3
|
+
|
|
4
|
+
This module provides functions for creating properly configured HTTP clients.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
import os
|
|
8
|
+
import socket
|
|
9
|
+
from typing import Dict, Optional, Union
|
|
10
|
+
|
|
11
|
+
import httpx
|
|
12
|
+
import requests
|
|
13
|
+
from tenacity import retry_if_exception_type, stop_after_attempt, wait_exponential
|
|
14
|
+
|
|
15
|
+
try:
|
|
16
|
+
from pydantic_ai.retries import (
|
|
17
|
+
AsyncTenacityTransport,
|
|
18
|
+
RetryConfig,
|
|
19
|
+
TenacityTransport,
|
|
20
|
+
wait_retry_after,
|
|
21
|
+
)
|
|
22
|
+
except ImportError:
|
|
23
|
+
# Fallback if pydantic_ai.retries is not available
|
|
24
|
+
AsyncTenacityTransport = None
|
|
25
|
+
RetryConfig = None
|
|
26
|
+
TenacityTransport = None
|
|
27
|
+
wait_retry_after = None
|
|
28
|
+
|
|
29
|
+
try:
|
|
30
|
+
from .reopenable_async_client import ReopenableAsyncClient
|
|
31
|
+
except ImportError:
|
|
32
|
+
ReopenableAsyncClient = None
|
|
33
|
+
|
|
34
|
+
try:
|
|
35
|
+
from .messaging import emit_info
|
|
36
|
+
except ImportError:
|
|
37
|
+
# Fallback if messaging system is not available
|
|
38
|
+
def emit_info(content: str, **metadata):
|
|
39
|
+
pass # No-op if messaging system is not available
|
|
40
|
+
|
|
41
|
+
|
|
42
|
+
def get_cert_bundle_path() -> str:
|
|
43
|
+
# First check if SSL_CERT_FILE environment variable is set
|
|
44
|
+
ssl_cert_file = os.environ.get("SSL_CERT_FILE")
|
|
45
|
+
if ssl_cert_file and os.path.exists(ssl_cert_file):
|
|
46
|
+
return ssl_cert_file
|
|
47
|
+
|
|
48
|
+
|
|
49
|
+
def create_client(
    timeout: int = 180,
    verify: Union[bool, str] = None,
    headers: Optional[Dict[str, str]] = None,
    retry_status_codes: tuple = (429, 502, 503, 504),
) -> httpx.Client:
    """Build a synchronous ``httpx.Client``, with retries when available.

    If pydantic_ai's tenacity transport classes were importable, the client
    retries responses whose status is in ``retry_status_codes`` (up to 5
    attempts, exponential backoff capped at 60s per wait, 300s max wait).
    Otherwise a plain client with the same TLS/header/timeout settings is
    returned.
    """
    if verify is None:
        verify = get_cert_bundle_path()

    base_kwargs = dict(verify=verify, headers=headers or {}, timeout=timeout)

    # Retry machinery is optional; without it, hand back a plain client.
    if not (TenacityTransport and RetryConfig and wait_retry_after):
        return httpx.Client(**base_kwargs)

    def should_retry_status(response):
        """Raise exceptions for retryable HTTP status codes."""
        if response.status_code in retry_status_codes:
            emit_info(f"HTTP retry: Retrying request due to status code {response.status_code}")
            response.raise_for_status()

    retry_config = RetryConfig(
        # Only retry HTTP status errors for the configured codes.
        retry=lambda e: isinstance(e, httpx.HTTPStatusError)
        and e.response.status_code in retry_status_codes,
        wait=wait_retry_after(
            fallback_strategy=wait_exponential(multiplier=1, max=60),
            max_wait=300,
        ),
        stop=stop_after_attempt(5),
        reraise=True,
    )
    retry_transport = TenacityTransport(
        config=retry_config,
        validate_response=should_retry_status,
    )
    return httpx.Client(transport=retry_transport, **base_kwargs)
83
|
+
|
|
84
|
+
|
|
85
|
+
def create_async_client(
    timeout: int = 180,
    verify: Union[bool, str] = None,
    headers: Optional[Dict[str, str]] = None,
    retry_status_codes: tuple = (429, 502, 503, 504),
) -> httpx.AsyncClient:
    """Build an ``httpx.AsyncClient``, with retries when available.

    Mirrors :func:`create_client`: when pydantic_ai's async tenacity
    transport could be imported, responses with a status in
    ``retry_status_codes`` are retried (5 attempts, exponential backoff
    capped at 60s per wait, 300s max wait); otherwise a plain async client
    is returned with the same TLS/header/timeout settings.
    """
    if verify is None:
        verify = get_cert_bundle_path()

    base_kwargs = dict(verify=verify, headers=headers or {}, timeout=timeout)

    # Retry machinery is optional; without it, hand back a plain client.
    if not (AsyncTenacityTransport and RetryConfig and wait_retry_after):
        return httpx.AsyncClient(**base_kwargs)

    def should_retry_status(response):
        """Raise exceptions for retryable HTTP status codes."""
        if response.status_code in retry_status_codes:
            emit_info(f"HTTP retry: Retrying request due to status code {response.status_code}")
            response.raise_for_status()

    retry_config = RetryConfig(
        # Only retry HTTP status errors for the configured codes.
        retry=lambda e: isinstance(e, httpx.HTTPStatusError)
        and e.response.status_code in retry_status_codes,
        wait=wait_retry_after(
            fallback_strategy=wait_exponential(multiplier=1, max=60),
            max_wait=300,
        ),
        stop=stop_after_attempt(5),
        reraise=True,
    )
    retry_transport = AsyncTenacityTransport(
        config=retry_config,
        validate_response=should_retry_status,
    )
    return httpx.AsyncClient(transport=retry_transport, **base_kwargs)
|
119
|
+
|
|
120
|
+
|
|
121
|
+
def create_requests_session(
    timeout: float = 5.0,
    verify: Union[bool, str] = None,
    headers: Optional[Dict[str, str]] = None,
) -> requests.Session:
    """Create a ``requests.Session`` with TLS verification and default headers.

    Args:
        timeout: Intended default timeout in seconds.
            NOTE(review): ``requests.Session`` has no session-wide timeout
            setting, so this parameter is currently not applied anywhere —
            callers must still pass ``timeout=`` on each request. Kept for
            interface compatibility; confirm whether callers rely on it.
        verify: CA bundle path or bool; defaults to ``get_cert_bundle_path()``.
        headers: Optional default headers merged into the session.

    Returns:
        A configured ``requests.Session``.
    """
    session = requests.Session()

    if verify is None:
        verify = get_cert_bundle_path()

    session.verify = verify

    if headers:
        # ``headers`` is known truthy here, so the old ``headers or {}``
        # guard was redundant and has been dropped.
        session.headers.update(headers)

    return session
|
137
|
+
|
|
138
|
+
|
|
139
|
+
def create_auth_headers(
    api_key: str, header_name: str = "Authorization"
) -> Dict[str, str]:
    """Build a single bearer-token auth header for *api_key*.

    Args:
        api_key: The token to embed after the ``Bearer `` prefix.
        header_name: Header key to use (defaults to ``Authorization``).

    Returns:
        A one-entry dict mapping ``header_name`` to ``Bearer <api_key>``.
    """
    bearer_value = f"Bearer {api_key}"
    return {header_name: bearer_value}
|
143
|
+
|
|
144
|
+
|
|
145
|
+
def resolve_env_var_in_header(headers: Dict[str, str]) -> Dict[str, str]:
    """Expand environment-variable references in header values.

    String values may contain ``$VAR`` / ``${VAR}`` references (and
    ``%VAR%`` on Windows), which are expanded via ``os.path.expandvars``;
    references to unset variables are left as-is by that function.
    Non-string values pass through unchanged.

    Args:
        headers: Header mapping whose values may contain env references.

    Returns:
        A new dict with the same keys and expanded string values.
    """
    # os.path.expandvars never raises for str input, so the original
    # per-value try/except was dead code and has been removed.
    return {
        key: os.path.expandvars(value) if isinstance(value, str) else value
        for key, value in headers.items()
    }
|
159
|
+
|
|
160
|
+
|
|
161
|
+
def create_reopenable_async_client(
    timeout: int = 180,
    verify: Union[bool, str] = None,
    headers: Optional[Dict[str, str]] = None,
    retry_status_codes: tuple = (429, 502, 503, 504),
) -> Union[ReopenableAsyncClient, httpx.AsyncClient]:
    """Build a ``ReopenableAsyncClient`` (or plain ``httpx.AsyncClient``).

    Prefers ``ReopenableAsyncClient`` when its import succeeded, falling
    back to ``httpx.AsyncClient`` otherwise. When pydantic_ai's async
    tenacity transport is available, responses with a status in
    ``retry_status_codes`` are retried (5 attempts, exponential backoff
    capped at 60s per wait, 300s max wait).
    """
    if verify is None:
        verify = get_cert_bundle_path()

    # Pick the client class once instead of branching in every code path.
    if ReopenableAsyncClient is not None:
        client_cls = ReopenableAsyncClient
    else:
        # Fall back to a regular AsyncClient if ReopenableAsyncClient is missing.
        client_cls = httpx.AsyncClient

    client_kwargs = dict(verify=verify, headers=headers or {}, timeout=timeout)

    if AsyncTenacityTransport and RetryConfig and wait_retry_after:
        def should_retry_status(response):
            """Raise exceptions for retryable HTTP status codes."""
            if response.status_code in retry_status_codes:
                emit_info(f"HTTP retry: Retrying request due to status code {response.status_code}")
                response.raise_for_status()

        client_kwargs["transport"] = AsyncTenacityTransport(
            config=RetryConfig(
                retry=lambda e: isinstance(e, httpx.HTTPStatusError)
                and e.response.status_code in retry_status_codes,
                wait=wait_retry_after(
                    fallback_strategy=wait_exponential(multiplier=1, max=60),
                    max_wait=300,
                ),
                stop=stop_after_attempt(5),
                reraise=True,
            ),
            validate_response=should_retry_status,
        )

    return client_cls(**client_kwargs)
207
|
+
|
|
208
|
+
|
|
209
|
+
def is_cert_bundle_available() -> bool:
    """Return True when an explicit CA bundle file is configured and exists.

    ``get_cert_bundle_path()`` may return a non-path value (it implicitly
    yields None when ``SSL_CERT_FILE`` is unset), and ``os.path.isfile``
    raises TypeError on non-string input — so require an actual string
    path before touching the filesystem. ``isfile`` already implies
    existence, making the original ``exists`` pre-check redundant.
    """
    cert_path = get_cert_bundle_path()
    return isinstance(cert_path, str) and os.path.isfile(cert_path)
|
212
|
+
|
|
213
|
+
|
|
214
|
+
def find_available_port(start_port=8090, end_port=9010, host="127.0.0.1"):
    """Scan ``[start_port, end_port]`` and return the first bindable port.

    A port counts as available when a fresh TCP socket (with
    ``SO_REUSEADDR`` set) can bind to it on *host*. Returns ``None`` when
    no port in the range is free (or the range is empty).
    """
    for candidate in range(start_port, end_port + 1):
        probe = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        try:
            probe.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
            probe.bind((host, candidate))
            return candidate
        except OSError:
            # Port is in use, try the next one.
            continue
        finally:
            probe.close()
    return None
|
@@ -226,11 +226,9 @@ class ManagedMCPServer:
|
|
|
226
226
|
http_kwargs["timeout"] = config["timeout"]
|
|
227
227
|
if "read_timeout" in config:
|
|
228
228
|
http_kwargs["read_timeout"] = config["read_timeout"]
|
|
229
|
-
if "
|
|
230
|
-
http_kwargs["
|
|
231
|
-
elif config.get("headers"):
|
|
229
|
+
if "headers" in config:
|
|
230
|
+
http_kwargs["headers"] = config.get("headers")
|
|
232
231
|
# Create HTTP client if headers are provided but no client specified
|
|
233
|
-
http_kwargs["http_client"] = self._get_http_client()
|
|
234
232
|
|
|
235
233
|
self._pydantic_server = MCPServerStreamableHTTP(
|
|
236
234
|
**http_kwargs, process_tool_call=process_tool_call
|
|
@@ -791,18 +791,17 @@ MCP_SERVER_REGISTRY: List[MCPServerTemplate] = [
|
|
|
791
791
|
description="Search and retrieve documentation from multiple sources with AI-powered context understanding",
|
|
792
792
|
category="Documentation",
|
|
793
793
|
tags=["documentation", "search", "context", "ai", "knowledge", "docs", "cloud"],
|
|
794
|
-
type="
|
|
794
|
+
type="http",
|
|
795
795
|
config={
|
|
796
|
-
"
|
|
797
|
-
|
|
798
|
-
|
|
796
|
+
"url": "https://mcp.context7.com/mcp",
|
|
797
|
+
"headers": {
|
|
798
|
+
"Authorization": "Bearer $CONTEXT7_API_KEY"
|
|
799
|
+
}
|
|
799
800
|
},
|
|
800
801
|
verified=True,
|
|
801
802
|
popular=True,
|
|
802
803
|
requires=MCPServerRequirements(
|
|
803
804
|
environment_vars=["CONTEXT7_API_KEY"],
|
|
804
|
-
required_tools=["node", "npx"],
|
|
805
|
-
package_dependencies=["@upstash/context7-mcp"],
|
|
806
805
|
),
|
|
807
806
|
example_usage="Cloud-based service - no local setup required",
|
|
808
807
|
),
|
|
@@ -1,15 +1,15 @@
|
|
|
1
1
|
import json
|
|
2
2
|
import queue
|
|
3
|
-
from typing import Any,
|
|
3
|
+
from typing import Any, List, Set, Tuple
|
|
4
4
|
|
|
5
5
|
import pydantic
|
|
6
6
|
from pydantic_ai.messages import ModelMessage, ModelRequest, TextPart, ToolCallPart
|
|
7
7
|
|
|
8
8
|
from code_puppy.config import (
|
|
9
|
-
get_compaction_strategy,
|
|
10
|
-
get_compaction_threshold,
|
|
11
9
|
get_model_name,
|
|
12
10
|
get_protected_token_count,
|
|
11
|
+
get_compaction_threshold,
|
|
12
|
+
get_compaction_strategy,
|
|
13
13
|
)
|
|
14
14
|
from code_puppy.messaging import emit_error, emit_info, emit_warning
|
|
15
15
|
from code_puppy.model_factory import ModelFactory
|
|
@@ -82,9 +82,7 @@ def estimate_tokens_for_message(message: ModelMessage) -> int:
|
|
|
82
82
|
|
|
83
83
|
|
|
84
84
|
def filter_huge_messages(messages: List[ModelMessage]) -> List[ModelMessage]:
|
|
85
|
-
|
|
86
|
-
deduplicated = deduplicate_tool_returns(messages)
|
|
87
|
-
filtered = [m for m in deduplicated if estimate_tokens_for_message(m) < 50000]
|
|
85
|
+
filtered = [m for m in messages if estimate_tokens_for_message(m) < 50000]
|
|
88
86
|
pruned = prune_interrupted_tool_calls(filtered)
|
|
89
87
|
return pruned
|
|
90
88
|
|
|
@@ -150,6 +148,81 @@ def split_messages_for_protected_summarization(
|
|
|
150
148
|
return messages_to_summarize, protected_messages
|
|
151
149
|
|
|
152
150
|
|
|
151
|
+
def deduplicate_tool_returns(messages: List[ModelMessage]) -> List[ModelMessage]:
    """
    Remove duplicate tool returns while preserving the first occurrence for each tool_call_id.

    Tool-return parts sharing a tool_call_id are collapsed to the first
    occurrence across the whole message list, preventing conversation
    corruption from duplicate tool_result blocks. Messages left with no
    parts after filtering are dropped entirely.
    """
    if not messages:
        return messages

    return_part_kinds = {"tool-return", "tool-result", "tool_result"}
    seen_tool_returns: Set[str] = set()
    deduplicated: List[ModelMessage] = []
    removed_count = 0

    def _rebuild(original, kept_parts):
        """Clone *original* with kept_parts, best-effort copying other public attrs."""
        clone = type(original)(parts=kept_parts)
        for attr_name in dir(original):
            if (
                attr_name.startswith("_")
                or attr_name == "parts"
                or not hasattr(original, attr_name)
            ):
                continue
            try:
                setattr(clone, attr_name, getattr(original, attr_name))
            except (AttributeError, TypeError):
                # Skip attributes that can't be set.
                pass
        return clone

    for message in messages:
        parts = getattr(message, "parts", None)
        if not parts:
            # Nothing to filter in this message.
            deduplicated.append(message)
            continue

        kept_parts = []
        had_duplicates = False
        for part in parts:
            call_id = getattr(part, "tool_call_id", None)
            kind = getattr(part, "part_kind", None)
            if call_id and kind in return_part_kinds:
                if call_id in seen_tool_returns:
                    # Duplicate return — drop it.
                    had_duplicates = True
                    removed_count += 1
                    continue
                # First occurrence wins.
                seen_tool_returns.add(call_id)
            kept_parts.append(part)

        if had_duplicates and kept_parts:
            deduplicated.append(_rebuild(message, kept_parts))
        elif kept_parts:
            deduplicated.append(message)
        # else: no parts remain after filtering — drop the whole message.

    if removed_count > 0:
        emit_warning(f"Removed {removed_count} duplicate tool-return part(s)")

    return deduplicated
|
|
224
|
+
|
|
225
|
+
|
|
153
226
|
def summarize_messages(
|
|
154
227
|
messages: List[ModelMessage], with_protection=True
|
|
155
228
|
) -> Tuple[List[ModelMessage], List[ModelMessage]]:
|
|
@@ -236,100 +309,21 @@ def get_model_context_length() -> int:
|
|
|
236
309
|
return int(context_length)
|
|
237
310
|
|
|
238
311
|
|
|
239
|
-
def deduplicate_tool_returns(messages: List[ModelMessage]) -> List[ModelMessage]:
|
|
240
|
-
"""
|
|
241
|
-
Remove duplicate tool returns while preserving the first occurrence for each tool_call_id.
|
|
242
|
-
|
|
243
|
-
This function identifies tool-return parts that share the same tool_call_id and
|
|
244
|
-
removes duplicates, keeping only the first return for each id. This prevents
|
|
245
|
-
conversation corruption from duplicate tool_result blocks.
|
|
246
|
-
"""
|
|
247
|
-
if not messages:
|
|
248
|
-
return messages
|
|
249
|
-
|
|
250
|
-
seen_tool_returns: Set[str] = set()
|
|
251
|
-
deduplicated: List[ModelMessage] = []
|
|
252
|
-
removed_count = 0
|
|
253
|
-
|
|
254
|
-
for msg in messages:
|
|
255
|
-
# Check if this message has any parts we need to filter
|
|
256
|
-
if not hasattr(msg, "parts") or not msg.parts:
|
|
257
|
-
deduplicated.append(msg)
|
|
258
|
-
continue
|
|
259
|
-
|
|
260
|
-
# Filter parts within this message
|
|
261
|
-
filtered_parts = []
|
|
262
|
-
msg_had_duplicates = False
|
|
263
|
-
|
|
264
|
-
for part in msg.parts:
|
|
265
|
-
tool_call_id = getattr(part, "tool_call_id", None)
|
|
266
|
-
part_kind = getattr(part, "part_kind", None)
|
|
267
|
-
|
|
268
|
-
# Check if this is a tool-return part
|
|
269
|
-
if tool_call_id and part_kind in {
|
|
270
|
-
"tool-return",
|
|
271
|
-
"tool-result",
|
|
272
|
-
"tool_result",
|
|
273
|
-
}:
|
|
274
|
-
if tool_call_id in seen_tool_returns:
|
|
275
|
-
# This is a duplicate return, skip it
|
|
276
|
-
msg_had_duplicates = True
|
|
277
|
-
removed_count += 1
|
|
278
|
-
continue
|
|
279
|
-
else:
|
|
280
|
-
# First occurrence of this return, keep it
|
|
281
|
-
seen_tool_returns.add(tool_call_id)
|
|
282
|
-
filtered_parts.append(part)
|
|
283
|
-
else:
|
|
284
|
-
# Not a tool return, always keep
|
|
285
|
-
filtered_parts.append(part)
|
|
286
|
-
|
|
287
|
-
# If we filtered out parts, create a new message with filtered parts
|
|
288
|
-
if msg_had_duplicates and filtered_parts:
|
|
289
|
-
# Create a new message with the same attributes but filtered parts
|
|
290
|
-
new_msg = type(msg)(parts=filtered_parts)
|
|
291
|
-
# Copy over other attributes if they exist
|
|
292
|
-
for attr_name in dir(msg):
|
|
293
|
-
if (
|
|
294
|
-
not attr_name.startswith("_")
|
|
295
|
-
and attr_name != "parts"
|
|
296
|
-
and hasattr(msg, attr_name)
|
|
297
|
-
):
|
|
298
|
-
try:
|
|
299
|
-
setattr(new_msg, attr_name, getattr(msg, attr_name))
|
|
300
|
-
except (AttributeError, TypeError):
|
|
301
|
-
# Skip attributes that can't be set
|
|
302
|
-
pass
|
|
303
|
-
deduplicated.append(new_msg)
|
|
304
|
-
elif filtered_parts: # No duplicates but has parts
|
|
305
|
-
deduplicated.append(msg)
|
|
306
|
-
# If no parts remain after filtering, drop the entire message
|
|
307
|
-
|
|
308
|
-
if removed_count > 0:
|
|
309
|
-
emit_warning(f"Removed {removed_count} duplicate tool-return part(s)")
|
|
310
|
-
|
|
311
|
-
return deduplicated
|
|
312
|
-
|
|
313
|
-
|
|
314
312
|
def prune_interrupted_tool_calls(messages: List[ModelMessage]) -> List[ModelMessage]:
|
|
315
313
|
"""
|
|
316
314
|
Remove any messages that participate in mismatched tool call sequences.
|
|
317
315
|
|
|
318
316
|
A mismatched tool call id is one that appears in a ToolCall (model/tool request)
|
|
319
|
-
without a corresponding tool return, or vice versa. We
|
|
320
|
-
|
|
321
|
-
messages that contain parts referencing mismatched tool_call_ids.
|
|
317
|
+
without a corresponding tool return, or vice versa. We preserve original order
|
|
318
|
+
and only drop messages that contain parts referencing mismatched tool_call_ids.
|
|
322
319
|
"""
|
|
323
320
|
if not messages:
|
|
324
321
|
return messages
|
|
325
322
|
|
|
326
|
-
|
|
327
|
-
|
|
323
|
+
tool_call_ids: Set[str] = set()
|
|
324
|
+
tool_return_ids: Set[str] = set()
|
|
328
325
|
|
|
329
|
-
|
|
330
|
-
tool_return_counts: Dict[str, int] = {}
|
|
331
|
-
|
|
332
|
-
# First pass: count occurrences of each tool_call_id for calls vs returns
|
|
326
|
+
# First pass: collect ids for calls vs returns
|
|
333
327
|
for msg in messages:
|
|
334
328
|
for part in getattr(msg, "parts", []) or []:
|
|
335
329
|
tool_call_id = getattr(part, "tool_call_id", None)
|
|
@@ -338,25 +332,11 @@ def prune_interrupted_tool_calls(messages: List[ModelMessage]) -> List[ModelMess
|
|
|
338
332
|
# Heuristic: if it's an explicit ToolCallPart or has a tool_name/args,
|
|
339
333
|
# consider it a call; otherwise it's a return/result.
|
|
340
334
|
if part.part_kind == "tool-call":
|
|
341
|
-
|
|
342
|
-
tool_call_counts.get(tool_call_id, 0) + 1
|
|
343
|
-
)
|
|
335
|
+
tool_call_ids.add(tool_call_id)
|
|
344
336
|
else:
|
|
345
|
-
|
|
346
|
-
tool_return_counts.get(tool_call_id, 0) + 1
|
|
347
|
-
)
|
|
348
|
-
|
|
349
|
-
# Find mismatched tool_call_ids (not exactly 1:1 ratio)
|
|
350
|
-
all_tool_ids = set(tool_call_counts.keys()) | set(tool_return_counts.keys())
|
|
351
|
-
mismatched: Set[str] = set()
|
|
352
|
-
|
|
353
|
-
for tool_id in all_tool_ids:
|
|
354
|
-
call_count = tool_call_counts.get(tool_id, 0)
|
|
355
|
-
return_count = tool_return_counts.get(tool_id, 0)
|
|
356
|
-
# Enforce strict 1:1 ratio - both must be exactly 1
|
|
357
|
-
if call_count != 1 or return_count != 1:
|
|
358
|
-
mismatched.add(tool_id)
|
|
337
|
+
tool_return_ids.add(tool_call_id)
|
|
359
338
|
|
|
339
|
+
mismatched: Set[str] = tool_call_ids.symmetric_difference(tool_return_ids)
|
|
360
340
|
if not mismatched:
|
|
361
341
|
return messages
|
|
362
342
|
|
|
@@ -382,10 +362,7 @@ def prune_interrupted_tool_calls(messages: List[ModelMessage]) -> List[ModelMess
|
|
|
382
362
|
|
|
383
363
|
|
|
384
364
|
def message_history_processor(messages: List[ModelMessage]) -> List[ModelMessage]:
|
|
385
|
-
# First,
|
|
386
|
-
messages = deduplicate_tool_returns(messages)
|
|
387
|
-
|
|
388
|
-
# Then, prune any interrupted/mismatched tool-call conversations
|
|
365
|
+
# First, prune any interrupted/mismatched tool-call conversations
|
|
389
366
|
total_current_tokens = sum(estimate_tokens_for_message(msg) for msg in messages)
|
|
390
367
|
|
|
391
368
|
model_max = get_model_context_length()
|
|
@@ -477,8 +454,6 @@ def truncation(
|
|
|
477
454
|
messages: List[ModelMessage], protected_tokens: int
|
|
478
455
|
) -> List[ModelMessage]:
|
|
479
456
|
emit_info("Truncating message history to manage token usage")
|
|
480
|
-
# First deduplicate tool returns to clean up any duplicates
|
|
481
|
-
messages = deduplicate_tool_returns(messages)
|
|
482
457
|
result = [messages[0]] # Always keep the first message (system prompt)
|
|
483
458
|
num_tokens = 0
|
|
484
459
|
stack = queue.LifoQueue()
|
|
@@ -501,10 +476,6 @@ def truncation(
|
|
|
501
476
|
|
|
502
477
|
def message_history_accumulator(messages: List[Any]):
|
|
503
478
|
_message_history = get_message_history()
|
|
504
|
-
|
|
505
|
-
# Deduplicate tool returns in current history before processing new messages
|
|
506
|
-
_message_history = deduplicate_tool_returns(_message_history)
|
|
507
|
-
|
|
508
479
|
message_history_hashes = set([hash_message(m) for m in _message_history])
|
|
509
480
|
for msg in messages:
|
|
510
481
|
if (
|
|
@@ -513,12 +484,6 @@ def message_history_accumulator(messages: List[Any]):
|
|
|
513
484
|
):
|
|
514
485
|
_message_history.append(msg)
|
|
515
486
|
|
|
516
|
-
# Deduplicate tool returns again after adding new messages to ensure no duplicates
|
|
517
|
-
_message_history = deduplicate_tool_returns(_message_history)
|
|
518
|
-
|
|
519
|
-
# Update the message history with deduplicated messages
|
|
520
|
-
set_message_history(_message_history)
|
|
521
|
-
|
|
522
487
|
# Apply message history trimming using the main processor
|
|
523
488
|
# This ensures we maintain global state while still managing context limits
|
|
524
489
|
message_history_processor(_message_history)
|