hanzo 0.3.22__py3-none-any.whl → 0.3.24__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

This release of hanzo has been flagged as potentially problematic.

hanzo/fallback_handler.py CHANGED
@@ -6,22 +6,25 @@ Automatically tries available AI options when primary fails.
 import os
 import shutil
 import subprocess
-from typing import Optional, Dict, Any
+from typing import Any, Dict, Optional
 from pathlib import Path
 
+
 class FallbackHandler:
     """Handles automatic fallback to available AI options."""
-
+
     def __init__(self):
         self.available_options = self._detect_available_options()
         self.fallback_order = self._determine_fallback_order()
-
+
     def _detect_available_options(self) -> Dict[str, bool]:
         """Detect which AI options are available."""
         options = {
             "openai_api": bool(os.getenv("OPENAI_API_KEY")),
             "anthropic_api": bool(os.getenv("ANTHROPIC_API_KEY")),
-            "google_api": bool(os.getenv("GOOGLE_API_KEY") or os.getenv("GEMINI_API_KEY")),
+            "google_api": bool(
+                os.getenv("GOOGLE_API_KEY") or os.getenv("GEMINI_API_KEY")
+            ),
             "openai_cli": shutil.which("openai") is not None,
             "claude_cli": shutil.which("claude") is not None,
             "gemini_cli": shutil.which("gemini") is not None,
@@ -30,24 +33,25 @@ class FallbackHandler:
             "free_apis": True,  # Always available (Codestral, StarCoder)
         }
         return options
-
+
     def _check_ollama(self) -> bool:
         """Check if Ollama is running and has models."""
        try:
             import httpx
+
             with httpx.Client(timeout=2.0) as client:
                 response = client.get("http://localhost:11434/api/tags")
                 if response.status_code == 200:
                     data = response.json()
                     return len(data.get("models", [])) > 0
-        except:
+        except Exception:
             pass
         return False
-
+
     def _determine_fallback_order(self) -> list:
         """Determine the order of fallback options based on availability."""
         order = []
-
+
         # Priority 1: API keys (fastest, most reliable)
         if self.available_options["openai_api"]:
             order.append(("openai_api", "gpt-4"))
@@ -55,7 +59,7 @@ class FallbackHandler:
             order.append(("anthropic_api", "claude-3-5-sonnet"))
         if self.available_options["google_api"]:
             order.append(("google_api", "gemini-pro"))
-
+
         # Priority 2: CLI tools (no API key needed)
         if self.available_options["openai_cli"]:
             order.append(("openai_cli", "codex"))
@@ -63,65 +67,72 @@ class FallbackHandler:
             order.append(("claude_cli", "claude-desktop"))
         if self.available_options["gemini_cli"]:
             order.append(("gemini_cli", "gemini"))
-
+
         # Priority 3: Local models (free, but requires setup)
         if self.available_options["ollama"]:
             order.append(("ollama", "local:llama3.2"))
         if self.available_options["hanzo_ide"]:
             order.append(("hanzo_ide", "hanzo-ide"))
-
+
         # Priority 4: Free cloud APIs (rate limited)
         if self.available_options["free_apis"]:
             order.append(("free_api", "codestral-free"))
             order.append(("free_api", "starcoder2"))
-
+
         return order
-
+
     def get_best_option(self) -> Optional[tuple]:
         """Get the best available AI option."""
         if self.fallback_order:
             return self.fallback_order[0]
         return None
-
+
     def get_next_option(self, failed_option: str) -> Optional[tuple]:
         """Get the next fallback option after one fails."""
         for i, (option_type, model) in enumerate(self.fallback_order):
             if model == failed_option and i + 1 < len(self.fallback_order):
                 return self.fallback_order[i + 1]
         return None
-
+
     def suggest_setup(self) -> str:
         """Suggest setup instructions for unavailable options."""
         suggestions = []
-
+
         if not self.available_options["openai_api"]:
             suggestions.append("• Set OPENAI_API_KEY for GPT-4/GPT-5 access")
-
+
         if not self.available_options["anthropic_api"]:
             suggestions.append("• Set ANTHROPIC_API_KEY for Claude access")
-
+
         if not self.available_options["ollama"]:
-            suggestions.append("• Install Ollama: curl -fsSL https://ollama.com/install.sh | sh")
+            suggestions.append(
+                "• Install Ollama: curl -fsSL https://ollama.com/install.sh | sh"
+            )
             suggestions.append("  Then run: ollama pull llama3.2")
-
+
         if not self.available_options["openai_cli"]:
             suggestions.append("• Install OpenAI CLI: pip install openai-cli")
-
+
         if not self.available_options["claude_cli"]:
-            suggestions.append("• Install Claude Desktop from https://claude.ai/download")
-
-        return "\n".join(suggestions) if suggestions else "All AI options are available!"
-
+            suggestions.append(
+                "• Install Claude Desktop from https://claude.ai/download"
+            )
+
+        return (
+            "\n".join(suggestions) if suggestions else "All AI options are available!"
+        )
+
     def print_status(self, console):
         """Print the current status of available AI options."""
         from rich.table import Table
-
-        table = Table(title="Available AI Options", show_header=True,
-                      header_style="bold magenta")
+
+        table = Table(
+            title="Available AI Options", show_header=True, header_style="bold magenta"
+        )
         table.add_column("Option", style="cyan", width=20)
         table.add_column("Status", width=10)
         table.add_column("Model", width=20)
-
+
         status_map = {
             "openai_api": ("OpenAI API", "gpt-4"),
             "anthropic_api": ("Anthropic API", "claude-3-5"),
@@ -133,17 +144,19 @@ class FallbackHandler:
             "hanzo_ide": ("Hanzo IDE", "hanzo-dev"),
             "free_apis": ("Free APIs", "codestral/starcoder"),
         }
-
+
         for key, available in self.available_options.items():
             if key in status_map:
                 name, model = status_map[key]
                 status = "✅" if available else "❌"
                 table.add_row(name, status, model if available else "Not available")
-
+
         console.print(table)
-
+
         if self.fallback_order:
-            console.print(f"\n[green]Primary option: {self.fallback_order[0][1]}[/green]")
+            console.print(
+                f"\n[green]Primary option: {self.fallback_order[0][1]}[/green]"
+            )
             if len(self.fallback_order) > 1:
                 fallbacks = ", ".join([opt[1] for opt in self.fallback_order[1:]])
                 console.print(f"[yellow]Fallback options: {fallbacks}[/yellow]")
@@ -159,91 +172,104 @@ async def smart_chat(message: str, console=None) -> Optional[str]:
     Returns the AI response or None if all options fail.
     """
     from .rate_limiter import smart_limiter
-
+
     handler = FallbackHandler()
-
+
     if console:
         console.print("\n[dim]Detecting available AI options...[/dim]")
-
+
     best_option = handler.get_best_option()
     if not best_option:
         if console:
             handler.print_status(console)
         return None
-
+
     option_type, model = best_option
-
+
     # Try the primary option with rate limiting
     try:
         if option_type == "openai_api":
+
             async def call_openai():
                 from openai import AsyncOpenAI
+
                 client = AsyncOpenAI()
                 response = await client.chat.completions.create(
                     model="gpt-4",
                     messages=[{"role": "user", "content": message}],
-                    max_tokens=500
+                    max_tokens=500,
                 )
                 return response.choices[0].message.content
-
+
             return await smart_limiter.execute_with_limit("openai", call_openai)
-
+
         elif option_type == "anthropic_api":
             from anthropic import AsyncAnthropic
+
             client = AsyncAnthropic()
             response = await client.messages.create(
                 model="claude-3-5-sonnet-20241022",
                 messages=[{"role": "user", "content": message}],
-                max_tokens=500
+                max_tokens=500,
             )
             return response.content[0].text
-
+
         elif option_type == "openai_cli":
             # Use OpenAI CLI
             result = subprocess.run(
-                ["openai", "api", "chat.completions.create", "-m", "gpt-4", "-g", message],
+                [
+                    "openai",
+                    "api",
+                    "chat.completions.create",
+                    "-m",
+                    "gpt-4",
+                    "-g",
+                    message,
+                ],
                 capture_output=True,
                 text=True,
-                timeout=30
+                timeout=30,
             )
             if result.returncode == 0:
                 return result.stdout.strip()
-
+
         elif option_type == "ollama":
             # Use Ollama
             import httpx
+
             async with httpx.AsyncClient() as client:
                 response = await client.post(
                     "http://localhost:11434/api/generate",
                     json={"model": "llama3.2", "prompt": message, "stream": False},
-                    timeout=30.0
+                    timeout=30.0,
                 )
                 if response.status_code == 200:
                     return response.json().get("response", "")
-
+
         elif option_type == "free_api":
             # Try free Codestral API
             import httpx
+
             async with httpx.AsyncClient() as client:
                 response = await client.post(
                     "https://codestral.mistral.ai/v1/fim/completions",
                     headers={"Content-Type": "application/json"},
                     json={"prompt": message, "suffix": "", "max_tokens": 500},
-                    timeout=30.0
+                    timeout=30.0,
                 )
                 if response.status_code == 200:
                     return response.json().get("choices", [{}])[0].get("text", "")
-
+
     except Exception as e:
         if console:
             console.print(f"[yellow]Primary option {model} failed: {e}[/yellow]")
             console.print("[dim]Trying fallback...[/dim]")
-
+
     # Try next fallback
     next_option = handler.get_next_option(model)
     if next_option:
         # Recursively try the next option
         handler.fallback_order.remove(best_option)
         return await smart_chat(message, console)
-
-    return None
+
+    return None
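Taken together, the fallback_handler.py changes are stylistic (Black formatting, sorted imports, a bare `except` narrowed to `except Exception`) with no behavioral change. For context, a minimal sketch of how the module is meant to be driven; the prompt text and the `asyncio.run` entry point are illustrative assumptions, not part of the package:

```
# Sketch only: exercises FallbackHandler and smart_chat as shipped in 0.3.24.
import asyncio

from rich.console import Console

from hanzo.fallback_handler import FallbackHandler, smart_chat

console = Console()
handler = FallbackHandler()
handler.print_status(console)  # table of detected providers, primary + fallbacks

# smart_chat() tries the best detected option first and falls back on failure.
reply = asyncio.run(smart_chat("Explain Python decorators", console=console))
print(reply if reply is not None else "All AI options failed")
```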
hanzo/interactive/repl.py CHANGED
@@ -35,8 +35,11 @@ class HanzoREPL:
         # Don't print welcome message here since it's already printed in cli.py
 
         # Set up command completer
+        cli_commands = ["chat", "ask", "agent", "node", "mcp", "network",
+                        "auth", "config", "tools", "miner", "serve", "net",
+                        "dev", "router"]
         completer = WordCompleter(
-            list(self.commands.keys()) + ["chat", "agent", "cluster", "mcp", "network"],
+            list(self.commands.keys()) + cli_commands,
             ignore_case=True,
         )
 
@@ -58,8 +61,12 @@ class HanzoREPL:
                 # Execute command
                 if cmd in self.commands:
                     await self.commands[cmd](args)
-                else:
+                elif cmd in ["chat", "ask", "agent", "node", "mcp", "network", "auth", "config", "tools", "miner", "serve", "net", "dev", "router"]:
+                    # Execute known CLI commands
                     await self.execute_command(cmd, args)
+                else:
+                    # Treat as chat message if not a known command
+                    await self.chat_with_ai(command)
 
             except KeyboardInterrupt:
                 continue
@@ -83,7 +90,7 @@ class HanzoREPL:
 All Hanzo CLI commands are available:
 - `chat <message>` - Chat with AI
 - `agent start` - Start an agent
-- `cluster status` - Check cluster status
+- `node status` - Check node status
 - `mcp tools` - List MCP tools
 - `network agents` - List network agents
 
@@ -91,7 +98,7 @@ All Hanzo CLI commands are available:
 ```
 hanzo> chat How do I create a Python web server?
 hanzo> agent list
-hanzo> cluster start --models llama-3.2-3b
+hanzo> node start --models llama-3.2-3b
 hanzo> mcp run read_file --arg path=README.md
 ```
 
@@ -114,53 +121,60 @@ hanzo> mcp run read_file --arg path=README.md
     async def show_status(self, args: str = ""):
         """Show system status."""
         status = {
-            "cluster": await self.check_cluster_status(),
+            "node": await self.check_node_status(),
             "agents": await self.count_agents(),
             "auth": self.check_auth_status(),
         }
 
         self.console.print("[cyan]System Status:[/cyan]")
-        self.console.print(f"  Cluster: {status['cluster']}")
+        self.console.print(f"  Node: {status['node']}")
         self.console.print(f"  Agents: {status['agents']}")
         self.console.print(f"  Auth: {status['auth']}")
 
     async def execute_command(self, cmd: str, args: str):
         """Execute a CLI command."""
-        # Import here to avoid circular imports
+        import os
         import sys
-
-        import click
-
-        from .. import cli
-
-        # Build command line
-        argv = [cmd]
+        import shutil
+        import subprocess
+
+        # Find hanzo executable
+        hanzo_cmd = shutil.which("hanzo")
+        if not hanzo_cmd:
+            # Try using Python module directly
+            hanzo_cmd = sys.executable
+            argv = [hanzo_cmd, "-m", "hanzo", cmd]
+        else:
+            argv = [hanzo_cmd, cmd]
+
         if args:
             import shlex
-
             argv.extend(shlex.split(args))
 
-        # Create a new context
+        # Execute as subprocess to avoid context issues
        try:
-            # Save original argv
-            orig_argv = sys.argv
-            sys.argv = ["hanzo"] + argv
-
-            # Execute command
-            ctx = click.Context(cli.cli)
-            cli.cli.invoke(ctx)
-
-        except SystemExit:
-            # Catch exit from commands
-            pass
+            result = subprocess.run(
+                argv,
+                capture_output=True,
+                text=True,
+                timeout=30,
+                env=os.environ.copy()  # Pass environment variables
+            )
+
+            if result.stdout:
+                self.console.print(result.stdout.rstrip())
+            if result.stderr and result.returncode != 0:
+                self.console.print(f"[red]{result.stderr.rstrip()}[/red]")
+
+        except subprocess.TimeoutExpired:
+            self.console.print("[red]Command timed out[/red]")
+        except FileNotFoundError:
+            self.console.print("[red]Command not found. Make sure 'hanzo' is installed.[/red]")
         except Exception as e:
             self.console.print(f"[red]Command error: {e}[/red]")
-        finally:
-            # Restore argv
-            sys.argv = orig_argv
 
-    async def check_cluster_status(self) -> str:
-        """Check if cluster is running."""
+    async def check_node_status(self) -> str:
+        """Check if node is running."""
         try:
             import httpx
 
@@ -185,3 +199,9 @@ hanzo> mcp run read_file --arg path=README.md
             return "authenticated (saved)"
         else:
             return "not authenticated"
+
+    async def chat_with_ai(self, message: str):
+        """Chat with AI when user types natural language."""
+        # For natural language input, try to use it as a chat message
+        # Default to cloud mode to avoid needing local server
+        await self.execute_command("ask", f"--cloud {message}")
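The repl.py changes are behavioral: `cluster` is renamed to `node`, known CLI commands are executed by shelling out to the installed `hanzo` binary instead of re-invoking the Click app in-process, and unrecognized input now falls through to cloud chat. A condensed, self-contained sketch of the new dispatch (the `dispatch` helper is illustrative, not the shipped class):

```
# Illustrative dispatch loop mirroring HanzoREPL's new behavior in 0.3.24.
import shlex
import shutil
import subprocess
import sys

CLI_COMMANDS = {"chat", "ask", "agent", "node", "mcp", "network", "auth",
                "config", "tools", "miner", "serve", "net", "dev", "router"}

def dispatch(line: str) -> None:
    cmd, _, args = line.strip().partition(" ")
    if cmd in CLI_COMMANDS:
        # 0.3.24 runs the real CLI as a subprocess instead of re-invoking
        # the Click app in-process (which required patching sys.argv).
        exe = shutil.which("hanzo")
        argv = [exe, cmd] if exe else [sys.executable, "-m", "hanzo", cmd]
        subprocess.run(argv + shlex.split(args), timeout=30)
    else:
        # Unknown input is treated as natural language and routed to
        # `ask --cloud`, avoiding the need for a local server.
        dispatch(f"ask --cloud {line}")

dispatch("node status")           # shells out to `hanzo node status`
dispatch("What is a coroutine?")  # becomes `hanzo ask --cloud What is a coroutine?`
```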
hanzo/mcp_server.py CHANGED
@@ -6,11 +6,16 @@ import click
 
 
 def main():
-    """Start the Hanzo MCP server."""
+    """Start the Hanzo MCP server.
+
+    This wrapper defers to hanzo_mcp.cli:main so that the CLI can parse
+    transport flags and configure logging BEFORE importing any heavy modules,
+    preventing stdio protocol corruption.
+    """
     try:
-        from hanzo_mcp.server import main as mcp_main
+        from hanzo_mcp.cli import main as cli_main
 
-        mcp_main()
+        cli_main()
     except ImportError:
         click.echo(
             "Error: hanzo-mcp is not installed. Please run: pip install hanzo[mcp] or pip install hanzo[all]",