hanzo 0.3.22__py3-none-any.whl → 0.3.24__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of hanzo might be problematic.
- hanzo/base_agent.py +516 -0
- hanzo/batch_orchestrator.py +988 -0
- hanzo/cli.py +7 -5
- hanzo/commands/chat.py +57 -16
- hanzo/commands/{cluster.py → node.py} +128 -128
- hanzo/commands/repl.py +5 -2
- hanzo/commands/router.py +152 -0
- hanzo/dev.py +407 -249
- hanzo/fallback_handler.py +79 -53
- hanzo/interactive/repl.py +52 -32
- hanzo/mcp_server.py +8 -3
- hanzo/memory_manager.py +146 -123
- hanzo/model_registry.py +399 -0
- hanzo/rate_limiter.py +60 -75
- hanzo/streaming.py +92 -71
- hanzo-0.3.24.dist-info/METADATA +276 -0
- {hanzo-0.3.22.dist-info → hanzo-0.3.24.dist-info}/RECORD +19 -15
- hanzo-0.3.22.dist-info/METADATA +0 -137
- {hanzo-0.3.22.dist-info → hanzo-0.3.24.dist-info}/WHEEL +0 -0
- {hanzo-0.3.22.dist-info → hanzo-0.3.24.dist-info}/entry_points.txt +0 -0
hanzo/cli.py
CHANGED
@@ -14,12 +14,13 @@ from .commands import (
     mcp,
     auth,
     chat,
+    node,
     repl,
     agent,
     miner,
     tools,
     config,
-    cluster,
+    router,
     network,
 )
 from .utils.output import console
@@ -72,7 +73,7 @@ def cli(ctx, verbose: bool, json: bool, config: Optional[str]):
 # Register command groups
 cli.add_command(agent.agent_group)
 cli.add_command(auth.auth_group)
-cli.add_command(cluster
+cli.add_command(node.cluster)
 cli.add_command(mcp.mcp_group)
 cli.add_command(miner.miner_group)
 cli.add_command(chat.chat_command)
@@ -80,6 +81,7 @@ cli.add_command(repl.repl_group)
 cli.add_command(tools.tools_group)
 cli.add_command(network.network_group)
 cli.add_command(config.config_group)
+cli.add_command(router.router_group)


 # Quick aliases
@@ -95,12 +97,12 @@ def ask(ctx, prompt: tuple, model: str, local: bool):


 @cli.command()
-@click.option("--name", "-n", default="hanzo-local", help="
+@click.option("--name", "-n", default="hanzo-local", help="Node name")
 @click.option("--port", "-p", default=8000, help="API port")
 @click.pass_context
 def serve(ctx, name: str, port: int):
-    """Start local AI
-    asyncio.run(
+    """Start local AI node (alias for 'hanzo node start')."""
+    asyncio.run(node.start_node(ctx, name, port))


 @cli.command()
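The cli.py hunks follow a standard Click pattern: the renamed command group is registered from its new module, a router group is added, and a top-level serve command simply delegates into an async startup routine via asyncio.run. The snippet below is a minimal, self-contained sketch of that pattern, not the package's code; the names router_group and start_node and the echoed messages are placeholders chosen to mirror the diff, and the real commands also thread a Click context (ctx) through, which the sketch omits.

import asyncio

import click


@click.group(name="router")
def router_group():
    """Manage the model router (placeholder group)."""


@router_group.command("start")
@click.option("--port", "-p", default=4000, help="Router port")
def router_start(port: int):
    # Stand-in for the real router startup logic.
    click.echo(f"router listening on :{port}")


async def start_node(name: str, port: int):
    # Stand-in for the real async node startup coroutine.
    click.echo(f"node {name} serving on :{port}")


@click.group()
def cli():
    """Top-level CLI (sketch)."""


# Register the group, mirroring cli.add_command(router.router_group) in the diff.
cli.add_command(router_group)


@cli.command()
@click.option("--name", "-n", default="hanzo-local", help="Node name")
@click.option("--port", "-p", default=8000, help="API port")
def serve(name: str, port: int):
    """Alias-style command that delegates to the node startup coroutine."""
    asyncio.run(start_node(name, port))


if __name__ == "__main__":
    cli()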
hanzo/commands/chat.py
CHANGED
@@ -88,20 +88,32 @@ async def ask_once(

     try:
         if local:
-            #
-
-
-
-
-
-
-
+            # Try router first, then fall back to local node
+            base_urls = [
+                "http://localhost:4000",  # Hanzo router default port
+                "http://localhost:8000",  # Local node port
+            ]
+
+            base_url = None
+            for url in base_urls:
+                try:
+                    async with httpx.AsyncClient() as client:
+                        await client.get(f"{url}/health", timeout=1.0)
+                        base_url = url
+                        break
+                except (httpx.ConnectError, httpx.TimeoutException):
+                    continue
+
+            if not base_url:
                 console.print(
-                    "[yellow]
+                    "[yellow]No local AI server running.[/yellow]\n"
+                    "Start one of:\n"
+                    " • Hanzo router: hanzo router start\n"
+                    " • Local node: hanzo serve"
                 )
                 return

-            # Make request to local
+            # Make request to local node
             async with httpx.AsyncClient() as client:
                 response = await client.post(
                     f"{base_url}/v1/chat/completions",
@@ -113,16 +125,31 @@ async def ask_once(
         else:
             # Use cloud API
             try:
-
+                # Try different import paths
+                try:
+                    from hanzoai import completion
+                except ImportError:
+                    try:
+                        from pkg.hanzoai import completion
+                    except ImportError:
+                        # Fallback to using litellm directly
+                        import litellm
+                        def completion(**kwargs):
+                            import os
+                            api_key = os.getenv("HANZO_API_KEY")
+                            if api_key:
+                                kwargs["api_key"] = api_key
+                                kwargs["api_base"] = "https://api.hanzo.ai/v1"
+                            return litellm.completion(**kwargs)

                 result = completion(
                     model=f"anthropic/{model}" if "claude" in model else model,
                     messages=messages,
                 )
                 content = result.choices[0].message.content
-            except ImportError:
-                console.print("[red]Error:[/red]
-                console.print("Install with: pip install
+            except ImportError as e:
+                console.print(f"[red]Error:[/red] Missing dependencies: {e}")
+                console.print("Install with: pip install litellm")
                 return

         # Display response
@@ -166,7 +193,7 @@ async def interactive_chat(ctx, model: str, local: bool, system: Optional[str]):
         console.print("AI: ", end="")
         with console.status(""):
             if local:
-                # Use local
+                # Use local node
                 async with httpx.AsyncClient() as client:
                     response = await client.post(
                         "http://localhost:8000/v1/chat/completions",
@@ -181,7 +208,21 @@ async def interactive_chat(ctx, model: str, local: bool, system: Optional[str]):
                     content = result["choices"][0]["message"]["content"]
             else:
                 # Use cloud API
-
+                try:
+                    from hanzoai import completion
+                except ImportError:
+                    try:
+                        from pkg.hanzoai import completion
+                    except ImportError:
+                        # Fallback to using litellm directly
+                        import litellm
+                        def completion(**kwargs):
+                            import os
+                            api_key = os.getenv("HANZO_API_KEY")
+                            if api_key:
+                                kwargs["api_key"] = api_key
+                                kwargs["api_base"] = "https://api.hanzo.ai/v1"
+                            return litellm.completion(**kwargs)

                 result = completion(
                     model=f"anthropic/{model}" if "claude" in model else model,