hanzo 0.3.23-py3-none-any.whl → 0.3.24-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of hanzo might be problematic.

hanzo/base_agent.py CHANGED
@@ -10,13 +10,12 @@ import os
 import asyncio
 import logging
 from abc import ABC, abstractmethod
-from typing import Any, Dict, List, Optional, Protocol, TypeVar, Generic
-from dataclasses import dataclass, field
-from datetime import datetime
+from typing import Any, Dict, List, Generic, TypeVar, Optional, Protocol
 from pathlib import Path
+from datetime import datetime
+from dataclasses import field, dataclass

-from .model_registry import registry, ModelConfig
-
+from .model_registry import ModelConfig, registry

 logger = logging.getLogger(__name__)

@@ -5,28 +5,28 @@ consensus mechanisms, and critic chains using the unified base classes.
 """

 import re
+import json
 import asyncio
 import logging
-from typing import Any, Dict, List, Optional, AsyncIterator, Callable
-from dataclasses import dataclass, field
-from datetime import datetime
-from pathlib import Path
-import json
 import subprocess
+from typing import Any, Dict, List, Callable, Optional, AsyncIterator
+from pathlib import Path
+from datetime import datetime
+from dataclasses import field, dataclass

-from rich.console import Console
-from rich.progress import Progress, TaskID, TextColumn, SpinnerColumn, BarColumn
-from rich.table import Table
 from rich.panel import Panel
+from rich.table import Table
+from rich.console import Console
+from rich.progress import TaskID, Progress, BarColumn, TextColumn, SpinnerColumn

 try:
     # Try to import from hanzo-mcp if available
-    from hanzo_mcp.core.model_registry import registry
     from hanzo_mcp.core.base_agent import AgentConfig, AgentResult, AgentOrchestrator
+    from hanzo_mcp.core.model_registry import registry
 except ImportError:
     # Fall back to local imports if hanzo-mcp is not installed
-    from .model_registry import registry
     from .base_agent import AgentConfig, AgentResult, AgentOrchestrator
+    from .model_registry import registry

 logger = logging.getLogger(__name__)
 console = Console()
@@ -906,7 +906,7 @@ class MetaAIOrchestrator:
         )
         try:
             return json.loads(result)
-        except:
+        except Exception:
             pass

         # Fallback intent detection
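Note on the change above: a bare except clause catches BaseException, so KeyboardInterrupt and SystemExit were silently swallowed along with JSON errors, while except Exception keeps the fallback behaviour for parsing failures and lets those signals propagate. A minimal sketch of the narrowed pattern in isolation (the helper name is hypothetical, not from the package):

import json

def try_parse_intent(result: str):
    # Return parsed JSON, or None so the caller can run its fallback intent detection.
    # Unlike a bare "except:", "except Exception" does not trap KeyboardInterrupt/SystemExit.
    try:
        return json.loads(result)
    except Exception:
        return None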
hanzo/cli.py CHANGED
@@ -14,12 +14,13 @@ from .commands import (
     mcp,
     auth,
     chat,
+    node,
     repl,
     agent,
     miner,
     tools,
     config,
-    cluster,
+    router,
     network,
 )
 from .utils.output import console
@@ -72,7 +73,7 @@ def cli(ctx, verbose: bool, json: bool, config: Optional[str]):
 # Register command groups
 cli.add_command(agent.agent_group)
 cli.add_command(auth.auth_group)
-cli.add_command(cluster.cluster_group)
+cli.add_command(node.cluster)
 cli.add_command(mcp.mcp_group)
 cli.add_command(miner.miner_group)
 cli.add_command(chat.chat_command)
@@ -80,6 +81,7 @@ cli.add_command(repl.repl_group)
 cli.add_command(tools.tools_group)
 cli.add_command(network.network_group)
 cli.add_command(config.config_group)
+cli.add_command(router.router_group)


 # Quick aliases
@@ -95,12 +97,12 @@ def ask(ctx, prompt: tuple, model: str, local: bool):


 @cli.command()
-@click.option("--name", "-n", default="hanzo-local", help="Cluster name")
+@click.option("--name", "-n", default="hanzo-local", help="Node name")
 @click.option("--port", "-p", default=8000, help="API port")
 @click.pass_context
 def serve(ctx, name: str, port: int):
-    """Start local AI cluster (alias for 'hanzo cluster start')."""
-    asyncio.run(cluster.start_cluster(ctx, name, port))
+    """Start local AI node (alias for 'hanzo node start')."""
+    asyncio.run(node.start_node(ctx, name, port))


 @cli.command()
hanzo/commands/chat.py CHANGED
@@ -88,20 +88,32 @@ async def ask_once(

     try:
         if local:
-            # Use local cluster
-            base_url = "http://localhost:8000"
-
-            # Check if cluster is running
-            try:
-                async with httpx.AsyncClient() as client:
-                    await client.get(f"{base_url}/health")
-            except httpx.ConnectError:
+            # Try router first, then fall back to local node
+            base_urls = [
+                "http://localhost:4000",  # Hanzo router default port
+                "http://localhost:8000",  # Local node port
+            ]
+
+            base_url = None
+            for url in base_urls:
+                try:
+                    async with httpx.AsyncClient() as client:
+                        await client.get(f"{url}/health", timeout=1.0)
+                    base_url = url
+                    break
+                except (httpx.ConnectError, httpx.TimeoutException):
+                    continue
+
+            if not base_url:
                 console.print(
-                    "[yellow]Local cluster not running. Start with: hanzo serve[/yellow]"
+                    "[yellow]No local AI server running.[/yellow]\n"
+                    "Start one of:\n"
+                    "  • Hanzo router: hanzo router start\n"
+                    "  • Local node: hanzo serve"
                 )
                 return

-            # Make request to local cluster
+            # Make request to local node
             async with httpx.AsyncClient() as client:
                 response = await client.post(
                     f"{base_url}/v1/chat/completions",
@@ -113,16 +125,31 @@
         else:
             # Use cloud API
             try:
-                from hanzoai import completion
+                # Try different import paths
+                try:
+                    from hanzoai import completion
+                except ImportError:
+                    try:
+                        from pkg.hanzoai import completion
+                    except ImportError:
+                        # Fallback to using litellm directly
+                        import litellm
+                        def completion(**kwargs):
+                            import os
+                            api_key = os.getenv("HANZO_API_KEY")
+                            if api_key:
+                                kwargs["api_key"] = api_key
+                                kwargs["api_base"] = "https://api.hanzo.ai/v1"
+                            return litellm.completion(**kwargs)

                 result = completion(
                     model=f"anthropic/{model}" if "claude" in model else model,
                     messages=messages,
                 )
                 content = result.choices[0].message.content
-            except ImportError:
-                console.print("[red]Error:[/red] hanzoai not installed")
-                console.print("Install with: pip install hanzo[all]")
+            except ImportError as e:
+                console.print(f"[red]Error:[/red] Missing dependencies: {e}")
+                console.print("Install with: pip install litellm")
                 return

         # Display response
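When neither hanzoai nor pkg.hanzoai can be imported, the new code above shims completion() on top of litellm, routing through api.hanzo.ai only when HANZO_API_KEY is set. A minimal standalone sketch of that shim, assuming litellm is installed (whether api_base is attached only alongside the key is an assumption; the flattened diff does not preserve that indentation):

import os
import litellm  # fallback dependency used when the hanzoai client is unavailable

def completion(**kwargs):
    # Attach the Hanzo key and API base when configured, otherwise defer to
    # litellm's normal provider/key resolution.
    api_key = os.getenv("HANZO_API_KEY")
    if api_key:
        kwargs["api_key"] = api_key
        kwargs["api_base"] = "https://api.hanzo.ai/v1"
    return litellm.completion(**kwargs)

# Example call shaped like the diff's call site (model name is illustrative only):
# completion(model="anthropic/claude-3-5-sonnet-20241022",
#            messages=[{"role": "user", "content": "hello"}])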
@@ -166,7 +193,7 @@ async def interactive_chat(ctx, model: str, local: bool, system: Optional[str]):
         console.print("AI: ", end="")
         with console.status(""):
             if local:
-                # Use local cluster
+                # Use local node
                 async with httpx.AsyncClient() as client:
                     response = await client.post(
                         "http://localhost:8000/v1/chat/completions",
@@ -181,7 +208,21 @@ async def interactive_chat(ctx, model: str, local: bool, system: Optional[str]):
                     content = result["choices"][0]["message"]["content"]
             else:
                 # Use cloud API
-                from hanzoai import completion
+                try:
+                    from hanzoai import completion
+                except ImportError:
+                    try:
+                        from pkg.hanzoai import completion
+                    except ImportError:
+                        # Fallback to using litellm directly
+                        import litellm
+                        def completion(**kwargs):
+                            import os
+                            api_key = os.getenv("HANZO_API_KEY")
+                            if api_key:
+                                kwargs["api_key"] = api_key
+                                kwargs["api_base"] = "https://api.hanzo.ai/v1"
+                            return litellm.completion(**kwargs)

                 result = completion(
                     model=f"anthropic/{model}" if "claude" in model else model,