code-puppy 0.0.96__py3-none-any.whl → 0.0.118__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- code_puppy/__init__.py +2 -5
- code_puppy/__main__.py +10 -0
- code_puppy/agent.py +125 -40
- code_puppy/agent_prompts.py +30 -24
- code_puppy/callbacks.py +152 -0
- code_puppy/command_line/command_handler.py +359 -0
- code_puppy/command_line/load_context_completion.py +59 -0
- code_puppy/command_line/model_picker_completion.py +14 -21
- code_puppy/command_line/motd.py +44 -28
- code_puppy/command_line/prompt_toolkit_completion.py +42 -23
- code_puppy/config.py +266 -26
- code_puppy/http_utils.py +122 -0
- code_puppy/main.py +570 -383
- code_puppy/message_history_processor.py +195 -104
- code_puppy/messaging/__init__.py +46 -0
- code_puppy/messaging/message_queue.py +288 -0
- code_puppy/messaging/queue_console.py +293 -0
- code_puppy/messaging/renderers.py +305 -0
- code_puppy/messaging/spinner/__init__.py +55 -0
- code_puppy/messaging/spinner/console_spinner.py +200 -0
- code_puppy/messaging/spinner/spinner_base.py +66 -0
- code_puppy/messaging/spinner/textual_spinner.py +97 -0
- code_puppy/model_factory.py +73 -105
- code_puppy/plugins/__init__.py +32 -0
- code_puppy/reopenable_async_client.py +225 -0
- code_puppy/state_management.py +60 -21
- code_puppy/summarization_agent.py +56 -35
- code_puppy/token_utils.py +7 -9
- code_puppy/tools/__init__.py +1 -4
- code_puppy/tools/command_runner.py +187 -32
- code_puppy/tools/common.py +44 -35
- code_puppy/tools/file_modifications.py +335 -118
- code_puppy/tools/file_operations.py +368 -95
- code_puppy/tools/token_check.py +27 -11
- code_puppy/tools/tools_content.py +53 -0
- code_puppy/tui/__init__.py +10 -0
- code_puppy/tui/app.py +1050 -0
- code_puppy/tui/components/__init__.py +21 -0
- code_puppy/tui/components/chat_view.py +512 -0
- code_puppy/tui/components/command_history_modal.py +218 -0
- code_puppy/tui/components/copy_button.py +139 -0
- code_puppy/tui/components/custom_widgets.py +58 -0
- code_puppy/tui/components/input_area.py +167 -0
- code_puppy/tui/components/sidebar.py +309 -0
- code_puppy/tui/components/status_bar.py +182 -0
- code_puppy/tui/messages.py +27 -0
- code_puppy/tui/models/__init__.py +8 -0
- code_puppy/tui/models/chat_message.py +25 -0
- code_puppy/tui/models/command_history.py +89 -0
- code_puppy/tui/models/enums.py +24 -0
- code_puppy/tui/screens/__init__.py +13 -0
- code_puppy/tui/screens/help.py +130 -0
- code_puppy/tui/screens/settings.py +256 -0
- code_puppy/tui/screens/tools.py +74 -0
- code_puppy/tui/tests/__init__.py +1 -0
- code_puppy/tui/tests/test_chat_message.py +28 -0
- code_puppy/tui/tests/test_chat_view.py +88 -0
- code_puppy/tui/tests/test_command_history.py +89 -0
- code_puppy/tui/tests/test_copy_button.py +191 -0
- code_puppy/tui/tests/test_custom_widgets.py +27 -0
- code_puppy/tui/tests/test_disclaimer.py +27 -0
- code_puppy/tui/tests/test_enums.py +15 -0
- code_puppy/tui/tests/test_file_browser.py +60 -0
- code_puppy/tui/tests/test_help.py +38 -0
- code_puppy/tui/tests/test_history_file_reader.py +107 -0
- code_puppy/tui/tests/test_input_area.py +33 -0
- code_puppy/tui/tests/test_settings.py +44 -0
- code_puppy/tui/tests/test_sidebar.py +33 -0
- code_puppy/tui/tests/test_sidebar_history.py +153 -0
- code_puppy/tui/tests/test_sidebar_history_navigation.py +132 -0
- code_puppy/tui/tests/test_status_bar.py +54 -0
- code_puppy/tui/tests/test_timestamped_history.py +52 -0
- code_puppy/tui/tests/test_tools.py +82 -0
- code_puppy/version_checker.py +26 -3
- {code_puppy-0.0.96.dist-info → code_puppy-0.0.118.dist-info}/METADATA +9 -2
- code_puppy-0.0.118.dist-info/RECORD +86 -0
- code_puppy-0.0.96.dist-info/RECORD +0 -32
- {code_puppy-0.0.96.data → code_puppy-0.0.118.data}/data/code_puppy/models.json +0 -0
- {code_puppy-0.0.96.dist-info → code_puppy-0.0.118.dist-info}/WHEEL +0 -0
- {code_puppy-0.0.96.dist-info → code_puppy-0.0.118.dist-info}/entry_points.txt +0 -0
- {code_puppy-0.0.96.dist-info → code_puppy-0.0.118.dist-info}/licenses/LICENSE +0 -0
code_puppy/messaging/spinner/textual_spinner.py
ADDED
@@ -0,0 +1,97 @@
+"""
+Textual spinner implementation for TUI mode.
+"""
+
+from textual.widgets import Static
+
+from .spinner_base import SpinnerBase
+
+
+class TextualSpinner(Static):
+    """A textual spinner widget based on the SimpleSpinnerWidget."""
+
+    # Use the frames from SpinnerBase
+    FRAMES = SpinnerBase.FRAMES
+
+    def __init__(self, **kwargs):
+        """Initialize the textual spinner."""
+        super().__init__("", **kwargs)
+        self._frame_index = 0
+        self._is_spinning = False
+        self._timer = None
+        self._paused = False
+        self._previous_state = ""
+
+        # Register this spinner for global management
+        from . import register_spinner
+
+        register_spinner(self)
+
+    def start_spinning(self):
+        """Start the spinner animation using Textual's timer system."""
+        if not self._is_spinning:
+            self._is_spinning = True
+            self._frame_index = 0
+            self.update_frame_display()
+            # Start the animation timer using Textual's timer system
+            self._timer = self.set_interval(0.10, self.update_frame_display)
+
+    def stop_spinning(self):
+        """Stop the spinner animation."""
+        self._is_spinning = False
+        if self._timer:
+            self._timer.stop()
+            self._timer = None
+        self.update("")
+
+        # Unregister this spinner from global management
+        from . import unregister_spinner
+
+        unregister_spinner(self)
+
+    def update_frame(self):
+        """Update to the next frame."""
+        if self._is_spinning:
+            self._frame_index = (self._frame_index + 1) % len(self.FRAMES)
+
+    def update_frame_display(self):
+        """Update the display with the current frame."""
+        if self._is_spinning:
+            self.update_frame()
+            current_frame = self.FRAMES[self._frame_index]
+
+            # Check if we're awaiting user input to determine which message to show
+            from code_puppy.tools.command_runner import is_awaiting_user_input
+
+            if is_awaiting_user_input():
+                # Show waiting message when waiting for user input
+                message = SpinnerBase.WAITING_MESSAGE
+            else:
+                # Show thinking message during normal processing
+                message = SpinnerBase.THINKING_MESSAGE
+
+            self.update(
+                f"[bold cyan]{message}[/bold cyan][bold cyan]{current_frame}[/bold cyan]"
+            )
+
+    def pause(self):
+        """Pause the spinner animation temporarily."""
+        if self._is_spinning and self._timer and not self._paused:
+            self._paused = True
+            self._timer.pause()
+            # Store current state but don't clear it completely
+            self._previous_state = self.text
+            self.update("")
+
+    def resume(self):
+        """Resume a paused spinner animation."""
+        # Check if we should show a spinner - don't resume if waiting for user input
+        from code_puppy.tools.command_runner import is_awaiting_user_input
+
+        if is_awaiting_user_input():
+            return  # Don't resume if waiting for user input
+
+        if self._is_spinning and self._timer and self._paused:
+            self._paused = False
+            self._timer.resume()
+            self.update_frame_display()
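
For orientation (not part of the diff), a minimal sketch of how this widget might be wired into a Textual app. Only TextualSpinner and its start_spinning()/stop_spinning() methods come from the file above; the app, its layout, and the widget id are hypothetical.

from textual.app import App, ComposeResult

from code_puppy.messaging.spinner.textual_spinner import TextualSpinner


class DemoApp(App):
    def compose(self) -> ComposeResult:
        # Mounting the widget registers it via register_spinner() in __init__.
        yield TextualSpinner(id="spinner")

    def on_mount(self) -> None:
        # Animation runs on Textual's set_interval timer until stop_spinning() is called.
        self.query_one("#spinner", TextualSpinner).start_spinning()


if __name__ == "__main__":
    DemoApp().run()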
code_puppy/model_factory.py
CHANGED
@@ -1,6 +1,7 @@
 import json
+import logging
 import os
-import
+import pathlib
 from typing import Any, Dict
 
 import httpx

@@ -12,9 +13,10 @@ from pydantic_ai.models.openai import OpenAIModel
 from pydantic_ai.providers.anthropic import AnthropicProvider
 from pydantic_ai.providers.google_gla import GoogleGLAProvider
 from pydantic_ai.providers.openai import OpenAIProvider
-from pydantic_ai.providers.openrouter import OpenRouterProvider
 
-from
+from . import callbacks
+from .config import EXTRA_MODELS_FILE
+from .http_utils import create_async_client
 
 # Environment variables used in this module:
 # - GEMINI_API_KEY: API key for Google's Gemini models. Required when using Gemini models.

@@ -26,59 +28,6 @@ from code_puppy.tools.common import console
 # Example: "X-Api-Key": "$OPENAI_API_KEY" will use the value from os.environ.get("OPENAI_API_KEY")
 
 
-def build_proxy_dict(proxy):
-    proxy_tokens = proxy.split(":")
-    structure = "{}:{}@{}:{}".format(
-        proxy_tokens[2], proxy_tokens[3], proxy_tokens[0], proxy_tokens[1]
-    )
-    proxies = {
-        "http": "http://{}/".format(structure),
-        "https": "http://{}".format(structure),
-    }
-    return proxies
-
-
-def build_httpx_proxy(proxy):
-    """Build an httpx.Proxy object from a proxy string in format ip:port:username:password"""
-    proxy_tokens = proxy.split(":")
-    if len(proxy_tokens) != 4:
-        raise ValueError(
-            f"Invalid proxy format: {proxy}. Expected format: ip:port:username:password"
-        )
-
-    ip, port, username, password = proxy_tokens
-    proxy_url = f"http://{ip}:{port}"
-    proxy_auth = (username, password)
-
-    # Log the proxy being used
-    console.log(f"Using proxy: {proxy_url} with username: {username}")
-
-    return httpx.Proxy(url=proxy_url, auth=proxy_auth)
-
-
-def get_random_proxy_from_file(file_path):
-    """Reads proxy file and returns a random proxy formatted for httpx.AsyncClient"""
-    if not os.path.exists(file_path):
-        raise ValueError(f"Proxy file '{file_path}' not found.")
-
-    with open(file_path, "r") as f:
-        proxies = [line.strip() for line in f.readlines() if line.strip()]
-
-    if not proxies:
-        raise ValueError(
-            f"Proxy file '{file_path}' is empty or contains only whitespace."
-        )
-
-    selected_proxy = random.choice(proxies)
-    try:
-        return build_httpx_proxy(selected_proxy)
-    except ValueError:
-        console.log(
-            f"Warning: Malformed proxy '{selected_proxy}' found in file '{file_path}', ignoring and continuing without proxy."
-        )
-        return None
-
-
 def get_custom_config(model_config):
     custom_config = model_config.get("custom_endpoint", {})
     if not custom_config:

@@ -91,32 +40,62 @@ def get_custom_config(model_config):
     headers = {}
     for key, value in custom_config.get("headers", {}).items():
         if value.startswith("$"):
-
+            env_var_name = value[1:]
+            resolved_value = os.environ.get(env_var_name)
+            if resolved_value is None:
+                raise ValueError(
+                    f"Environment variable '{env_var_name}' is required for custom endpoint headers but is not set. "
+                    f"Please set the environment variable: export {env_var_name}=your_value"
+                )
+            value = resolved_value
         headers[key] = value
 
-    ca_certs_path = None
-    if "ca_certs_path" in custom_config:
-        ca_certs_path = custom_config.get("ca_certs_path")
-        if ca_certs_path.lower() == "false":
-            ca_certs_path = False
-
     api_key = None
     if "api_key" in custom_config:
         if custom_config["api_key"].startswith("$"):
-
+            env_var_name = custom_config["api_key"][1:]
+            api_key = os.environ.get(env_var_name)
+            if api_key is None:
+                raise ValueError(
+                    f"Environment variable '{env_var_name}' is required for custom endpoint API key but is not set. "
+                    f"Please set the environment variable: export {env_var_name}=your_value"
+                )
         else:
             api_key = custom_config["api_key"]
-
+    if "ca_certs_path" in custom_config:
+        verify = custom_config["ca_certs_path"]
+    else:
+        verify = None
+    return url, headers, verify, api_key
 
 
 class ModelFactory:
     """A factory for creating and managing different AI models."""
 
     @staticmethod
-    def load_config(
-
-
-
+    def load_config() -> Dict[str, Any]:
+        load_model_config_callbacks = callbacks.get_callbacks("load_model_config")
+        if len(load_model_config_callbacks) > 0:
+            if len(load_model_config_callbacks) > 1:
+                logging.getLogger(__name__).warning(
+                    "Multiple load_model_config callbacks registered, using the first"
+                )
+            config = callbacks.on_load_model_config()[0]
+        else:
+            from code_puppy.config import MODELS_FILE
+
+            if not pathlib.Path(MODELS_FILE).exists():
+                with open(pathlib.Path(__file__).parent / "models.json", "r") as src:
+                    with open(pathlib.Path(MODELS_FILE), "w") as target:
+                        target.write(src.read())
+
+            with open(MODELS_FILE, "r") as f:
+                config = json.load(f)
+            if pathlib.Path(EXTRA_MODELS_FILE).exists():
+                with open(EXTRA_MODELS_FILE, "r") as f:
+                    extra_config = json.load(f)
+                config.update(extra_config)
+        return config
 
     @staticmethod
     def get_model(model_name: str, config: Dict[str, Any]) -> Any:

@@ -152,19 +131,8 @@
             return AnthropicModel(model_name=model_config["name"], provider=provider)
 
         elif model_type == "custom_anthropic":
-            url, headers,
-
-            # Check for proxy configuration
-            proxy_file_path = os.environ.get("CODE_PUPPY_PROXIES")
-            proxy = None
-            if proxy_file_path:
-                proxy = get_random_proxy_from_file(proxy_file_path)
-
-            # Only pass proxy to client if it's valid
-            client_args = {"headers": headers, "verify": ca_certs_path}
-            if proxy is not None:
-                client_args["proxy"] = proxy
-            client = httpx.AsyncClient(**client_args)
+            url, headers, verify, api_key = get_custom_config(model_config)
+            client = create_async_client(headers=headers, verify=verify)
             anthropic_client = AsyncAnthropic(
                 base_url=url,
                 http_client=client,

@@ -228,19 +196,8 @@
             return model
 
         elif model_type == "custom_openai":
-            url, headers,
-
-            # Check for proxy configuration
-            proxy_file_path = os.environ.get("CODE_PUPPY_PROXIES")
-            proxy = None
-            if proxy_file_path:
-                proxy = get_random_proxy_from_file(proxy_file_path)
-
-            # Only pass proxy to client if it's valid
-            client_args = {"headers": headers, "verify": ca_certs_path}
-            if proxy is not None:
-                client_args["proxy"] = proxy
-            client = httpx.AsyncClient(**client_args)
+            url, headers, verify, api_key = get_custom_config(model_config)
+            client = create_async_client(headers=headers, verify=verify)
             provider_args = dict(
                 base_url=url,
                 http_client=client,

@@ -252,16 +209,27 @@
             model = OpenAIModel(model_name=model_config["name"], provider=provider)
             setattr(model, "provider", provider)
             return model
-
-
-
-
-
-
-
-
-
-
+
+        elif model_type == "custom_gemini":
+            url, headers, verify, api_key = get_custom_config(model_config)
+            os.environ["GEMINI_API_KEY"] = api_key
+
+            class CustomGoogleGLAProvider(GoogleGLAProvider):
+                def __init__(self, *args, **kwargs):
+                    super().__init__(*args, **kwargs)
+
+                @property
+                def base_url(self):
+                    return url
+
+                @property
+                def client(self) -> httpx.AsyncClient:
+                    _client = create_async_client(headers=headers, verify=verify)
+                    _client.base_url = self.base_url
+                    return _client
+
+            google_gla = CustomGoogleGLAProvider(api_key=api_key)
+            model = GeminiModel(model_name=model_config["name"], provider=google_gla)
             return model
         else:
             raise ValueError(f"Unsupported model type: {model_type}")
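
For orientation (not part of the diff), a minimal sketch of how the reworked factory is driven. Only load_config() and get_model(model_name, config) come from the code above; the model key "my-custom-gpt" is hypothetical and would have to exist in the loaded configuration.

from code_puppy.model_factory import ModelFactory

# load_config() prefers a registered "load_model_config" callback; otherwise it
# reads MODELS_FILE (seeding it from the bundled models.json on first run) and
# overlays EXTRA_MODELS_FILE when that file exists.
config = ModelFactory.load_config()

# "my-custom-gpt" is a hypothetical entry; custom_* model types route through
# get_custom_config() and create_async_client() as shown in the hunks above.
model = ModelFactory.get_model("my-custom-gpt", config)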
code_puppy/plugins/__init__.py
ADDED
@@ -0,0 +1,32 @@
+import importlib
+import logging
+from pathlib import Path
+
+logger = logging.getLogger(__name__)
+
+
+def load_plugin_callbacks():
+    """Dynamically load register_callbacks.py from all plugin submodules."""
+    plugins_dir = Path(__file__).parent
+
+    # Iterate through all subdirectories in the plugins folder
+    for item in plugins_dir.iterdir():
+        if item.is_dir() and not item.name.startswith("_"):
+            plugin_name = item.name
+            callbacks_file = item / "register_callbacks.py"
+
+            if callbacks_file.exists():
+                try:
+                    # Import the register_callbacks module dynamically
+                    module_name = f"code_puppy.plugins.{plugin_name}.register_callbacks"
+                    logger.debug(f"Loading plugin callbacks from {module_name}")
+                    importlib.import_module(module_name)
+                    logger.info(
+                        f"Successfully loaded callbacks from plugin: {plugin_name}"
+                    )
+                except ImportError as e:
+                    logger.warning(
+                        f"Failed to import callbacks from plugin {plugin_name}: {e}"
+                    )
+                except Exception as e:
+                    logger.error(f"Unexpected error loading plugin {plugin_name}: {e}")
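
The loader above only imports code_puppy.plugins.<name>.register_callbacks, so registration has to happen at module import time. Below is a hypothetical plugin sketch (not part of the diff): the directory name and the register_callback() helper are assumptions, since the diff shows only the consumer side of code_puppy/callbacks.py (get_callbacks() and on_load_model_config() in model_factory.py).

# code_puppy/plugins/acme_models/register_callbacks.py  (hypothetical plugin)
from code_puppy import callbacks


def load_model_config():
    # Return a models.json-shaped dict; ModelFactory.load_config() will use the
    # first registered "load_model_config" callback instead of reading MODELS_FILE.
    return {"acme-gpt": {"type": "custom_openai", "name": "gpt-4o"}}


# register_callback() is an assumed helper name in code_puppy.callbacks;
# load_plugin_callbacks() merely imports this module, so this call runs at import time.
callbacks.register_callback("load_model_config", load_model_config)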
code_puppy/reopenable_async_client.py
ADDED
@@ -0,0 +1,225 @@
+"""
+ReopenableAsyncClient - A reopenable httpx.AsyncClient wrapper.
+
+This module provides a ReopenableAsyncClient class that extends httpx.AsyncClient
+to support reopening after being closed, which the standard httpx.AsyncClient
+doesn't support.
+"""
+
+from typing import Optional, Union
+
+import httpx
+
+
+class ReopenableAsyncClient:
+    """
+    A wrapper around httpx.AsyncClient that can be reopened after being closed.
+
+    Standard httpx.AsyncClient becomes unusable after calling aclose().
+    This class allows you to reopen the client and continue using it.
+
+    Example:
+        >>> client = ReopenableAsyncClient(timeout=30.0)
+        >>> await client.get("https://httpbin.org/get")
+        >>> await client.aclose()
+        >>> # Client is now closed, but can be reopened
+        >>> await client.reopen()
+        >>> await client.get("https://httpbin.org/get")  # Works!
+
+    The client preserves all original configuration when reopening.
+    """
+
+    class _StreamWrapper:
+        """Async context manager wrapper for streaming responses."""
+
+        def __init__(
+            self,
+            parent_client: "ReopenableAsyncClient",
+            method: str,
+            url: Union[str, httpx.URL],
+            **kwargs,
+        ):
+            self.parent_client = parent_client
+            self.method = method
+            self.url = url
+            self.kwargs = kwargs
+            self._stream_context = None
+
+        async def __aenter__(self):
+            client = await self.parent_client._ensure_client_open()
+            self._stream_context = client.stream(self.method, self.url, **self.kwargs)
+            return await self._stream_context.__aenter__()
+
+        async def __aexit__(self, exc_type, exc_val, exc_tb):
+            if self._stream_context:
+                return await self._stream_context.__aexit__(exc_type, exc_val, exc_tb)
+
+    def __init__(self, **kwargs):
+        """
+        Initialize the ReopenableAsyncClient.
+
+        Args:
+            **kwargs: All arguments that would be passed to httpx.AsyncClient()
+        """
+        self._client_kwargs = kwargs.copy()
+        self._client: Optional[httpx.AsyncClient] = None
+        self._is_closed = True
+
+    async def _ensure_client_open(self) -> httpx.AsyncClient:
+        """
+        Ensure the underlying client is open and ready to use.
+
+        Returns:
+            The active httpx.AsyncClient instance
+
+        Raises:
+            RuntimeError: If client cannot be opened
+        """
+        if self._is_closed or self._client is None:
+            await self._create_client()
+        return self._client
+
+    async def _create_client(self) -> None:
+        """Create a new httpx.AsyncClient with the stored configuration."""
+        if self._client is not None and not self._is_closed:
+            # Close existing client first
+            await self._client.aclose()
+
+        self._client = httpx.AsyncClient(**self._client_kwargs)
+        self._is_closed = False
+
+    async def reopen(self) -> None:
+        """
+        Explicitly reopen the client after it has been closed.
+
+        This is useful when you want to reuse a client that was previously closed.
+        """
+        await self._create_client()
+
+    async def aclose(self) -> None:
+        """
+        Close the underlying httpx.AsyncClient.
+
+        After calling this, the client can be reopened using reopen() or
+        automatically when making the next request.
+        """
+        if self._client is not None and not self._is_closed:
+            await self._client.aclose()
+        self._is_closed = True
+
+    @property
+    def is_closed(self) -> bool:
+        """Check if the client is currently closed."""
+        return self._is_closed or self._client is None
+
+    # Delegate all httpx.AsyncClient methods to the underlying client
+
+    async def get(self, url: Union[str, httpx.URL], **kwargs) -> httpx.Response:
+        """Make a GET request."""
+        client = await self._ensure_client_open()
+        return await client.get(url, **kwargs)
+
+    async def post(self, url: Union[str, httpx.URL], **kwargs) -> httpx.Response:
+        """Make a POST request."""
+        client = await self._ensure_client_open()
+        return await client.post(url, **kwargs)
+
+    async def put(self, url: Union[str, httpx.URL], **kwargs) -> httpx.Response:
+        """Make a PUT request."""
+        client = await self._ensure_client_open()
+        return await client.put(url, **kwargs)
+
+    async def patch(self, url: Union[str, httpx.URL], **kwargs) -> httpx.Response:
+        """Make a PATCH request."""
+        client = await self._ensure_client_open()
+        return await client.patch(url, **kwargs)
+
+    async def delete(self, url: Union[str, httpx.URL], **kwargs) -> httpx.Response:
+        """Make a DELETE request."""
+        client = await self._ensure_client_open()
+        return await client.delete(url, **kwargs)
+
+    async def head(self, url: Union[str, httpx.URL], **kwargs) -> httpx.Response:
+        """Make a HEAD request."""
+        client = await self._ensure_client_open()
+        return await client.head(url, **kwargs)
+
+    async def options(self, url: Union[str, httpx.URL], **kwargs) -> httpx.Response:
+        """Make an OPTIONS request."""
+        client = await self._ensure_client_open()
+        return await client.options(url, **kwargs)
+
+    async def request(
+        self, method: str, url: Union[str, httpx.URL], **kwargs
+    ) -> httpx.Response:
+        """Make a request with the specified HTTP method."""
+        client = await self._ensure_client_open()
+        return await client.request(method, url, **kwargs)
+
+    async def send(self, request: httpx.Request, **kwargs) -> httpx.Response:
+        """Send a pre-built request."""
+        client = await self._ensure_client_open()
+        return await client.send(request, **kwargs)
+
+    def build_request(
+        self, method: str, url: Union[str, httpx.URL], **kwargs
+    ) -> httpx.Request:
+        """
+        Build a request without sending it.
+
+        Note: This creates a temporary client if none exists, but doesn't keep it open.
+        """
+        if self._client is None or self._is_closed:
+            # Create a temporary client just for building the request
+            temp_client = httpx.AsyncClient(**self._client_kwargs)
+            try:
+                request = temp_client.build_request(method, url, **kwargs)
+                return request
+            finally:
+                # Clean up the temporary client synchronously if possible
+                # Note: This might leave a connection open, but it's better than
+                # making this method async just for building requests
+                pass
+        return self._client.build_request(method, url, **kwargs)
+
+    def stream(self, method: str, url: Union[str, httpx.URL], **kwargs):
+        """Stream a request. Returns an async context manager."""
+        return self._StreamWrapper(self, method, url, **kwargs)
+
+    # Context manager support
+    async def __aenter__(self):
+        """Async context manager entry."""
+        await self._ensure_client_open()
+        return self
+
+    async def __aexit__(self, exc_type, exc_val, exc_tb):
+        """Async context manager exit."""
+        await self.aclose()
+
+    # Properties that don't require an active client
+    @property
+    def timeout(self) -> Optional[httpx.Timeout]:
+        """Get the configured timeout."""
+        return self._client_kwargs.get("timeout")
+
+    @property
+    def headers(self) -> httpx.Headers:
+        """Get the configured headers."""
+        if self._client is not None:
+            return self._client.headers
+        # Return headers from kwargs if client doesn't exist
+        headers = self._client_kwargs.get("headers", {})
+        return httpx.Headers(headers)
+
+    @property
+    def cookies(self) -> httpx.Cookies:
+        """Get the current cookies."""
+        if self._client is not None and not self._is_closed:
+            return self._client.cookies
+        # Return empty cookies if client doesn't exist or is closed
+        return httpx.Cookies()
+
+    def __repr__(self) -> str:
+        """String representation of the client."""
+        status = "closed" if self.is_closed else "open"
+        return f"<ReopenableAsyncClient [{status}]>"
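
Beyond the reopen example in the class docstring, streaming goes through the nested _StreamWrapper. A short usage sketch (not part of the diff; the URL is just a placeholder):

import asyncio

from code_puppy.reopenable_async_client import ReopenableAsyncClient


async def main() -> None:
    client = ReopenableAsyncClient(timeout=30.0)

    # stream() returns _StreamWrapper, whose __aenter__ lazily (re)opens the
    # underlying httpx.AsyncClient before delegating to httpx's own stream().
    async with client.stream("GET", "https://httpbin.org/get") as response:
        async for chunk in response.aiter_bytes():
            print(len(chunk))

    await client.aclose()
    # The next call transparently recreates the inner client from the saved kwargs.
    print(await client.get("https://httpbin.org/get"))


asyncio.run(main())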
code_puppy/state_management.py
CHANGED
@@ -1,8 +1,66 @@
 from typing import Any, List
 
-from code_puppy.message_history_processor import message_history_processor
-
 _message_history: List[Any] = []
+_compacted_message_hashes = set()
+_tui_mode: bool = False
+_tui_app_instance: Any = None
+
+
+def add_compacted_message_hash(message_hash: str) -> None:
+    """Add a message hash to the set of compacted message hashes."""
+    _compacted_message_hashes.add(message_hash)
+
+
+def get_compacted_message_hashes():
+    """Get the set of compacted message hashes."""
+    return _compacted_message_hashes
+
+
+def set_tui_mode(enabled: bool) -> None:
+    """Set the global TUI mode state.
+
+    Args:
+        enabled: True if running in TUI mode, False otherwise
+    """
+    global _tui_mode
+    _tui_mode = enabled
+
+
+def is_tui_mode() -> bool:
+    """Check if the application is running in TUI mode.
+
+    Returns:
+        True if running in TUI mode, False otherwise
+    """
+    return _tui_mode
+
+
+def set_tui_app_instance(app_instance: Any) -> None:
+    """Set the global TUI app instance reference.
+
+    Args:
+        app_instance: The TUI app instance
+    """
+    global _tui_app_instance
+    _tui_app_instance = app_instance
+
+
+def get_tui_app_instance() -> Any:
+    """Get the current TUI app instance.
+
+    Returns:
+        The TUI app instance if available, None otherwise
+    """
+    return _tui_app_instance
+
+
+def get_tui_mode() -> bool:
+    """Get the current TUI mode state.
+
+    Returns:
+        True if running in TUI mode, False otherwise
+    """
+    return _tui_mode
 
 
 def get_message_history() -> List[Any]:

@@ -37,22 +95,3 @@ def hash_message(message):
         else:
             hashable_entities.append(part.content)
     return hash(",".join(hashable_entities))
-
-
-def message_history_accumulator(messages: List[Any]):
-    global _message_history
-
-    message_history_hashes = set([hash_message(m) for m in _message_history])
-    for msg in messages:
-        if hash_message(msg) not in message_history_hashes:
-            _message_history.append(msg)
-
-    # Apply message history trimming using the main processor
-    # This ensures we maintain global state while still managing context limits
-    trimmed_messages = message_history_processor(_message_history)
-
-    # Update our global state with the trimmed version
-    # This preserves the state but keeps us within token limits
-    _message_history = trimmed_messages
-
-    return _message_history