hanzo 0.3.5__tar.gz → 0.3.7__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of hanzo might be problematic.
- hanzo-0.3.7/.gitignore +16 -0
- hanzo-0.3.7/PKG-INFO +138 -0
- hanzo-0.3.7/README.md +84 -0
- hanzo-0.3.7/pyproject.toml +92 -0
- {hanzo-0.3.5 → hanzo-0.3.7/src}/hanzo/__init__.py +1 -1
- {hanzo-0.3.5 → hanzo-0.3.7/src}/hanzo/__main__.py +1 -1
- {hanzo-0.3.5 → hanzo-0.3.7/src}/hanzo/cli.py +83 -49
- hanzo-0.3.7/src/hanzo/commands/__init__.py +14 -0
- {hanzo-0.3.5 → hanzo-0.3.7/src}/hanzo/commands/agent.py +18 -21
- {hanzo-0.3.5 → hanzo-0.3.7/src}/hanzo/commands/auth.py +63 -59
- {hanzo-0.3.5 → hanzo-0.3.7/src}/hanzo/commands/chat.py +54 -35
- {hanzo-0.3.5 → hanzo-0.3.7/src}/hanzo/commands/cluster.py +109 -78
- {hanzo-0.3.5 → hanzo-0.3.7/src}/hanzo/commands/config.py +39 -38
- {hanzo-0.3.5 → hanzo-0.3.7/src}/hanzo/commands/mcp.py +63 -42
- {hanzo-0.3.5 → hanzo-0.3.7/src}/hanzo/commands/miner.py +71 -58
- {hanzo-0.3.5 → hanzo-0.3.7/src}/hanzo/commands/network.py +64 -55
- {hanzo-0.3.5 → hanzo-0.3.7/src}/hanzo/commands/repl.py +37 -30
- {hanzo-0.3.5 → hanzo-0.3.7/src}/hanzo/commands/tools.py +52 -67
- {hanzo-0.3.5 → hanzo-0.3.7/src}/hanzo/interactive/__init__.py +1 -1
- {hanzo-0.3.5 → hanzo-0.3.7/src}/hanzo/interactive/dashboard.py +34 -44
- {hanzo-0.3.5 → hanzo-0.3.7/src}/hanzo/interactive/repl.py +35 -32
- {hanzo-0.3.5 → hanzo-0.3.7/src}/hanzo/mcp_server.py +7 -2
- {hanzo-0.3.5 → hanzo-0.3.7/src}/hanzo/repl.py +13 -3
- {hanzo-0.3.5 → hanzo-0.3.7/src}/hanzo/router/__init__.py +21 -9
- {hanzo-0.3.5 → hanzo-0.3.7/src}/hanzo/utils/__init__.py +1 -1
- {hanzo-0.3.5 → hanzo-0.3.7/src}/hanzo/utils/config.py +37 -35
- {hanzo-0.3.5 → hanzo-0.3.7/src}/hanzo/utils/net_check.py +33 -29
- {hanzo-0.3.5 → hanzo-0.3.7/src}/hanzo/utils/output.py +23 -18
- hanzo-0.3.5/PKG-INFO +0 -76
- hanzo-0.3.5/hanzo/commands/__init__.py +0 -3
- hanzo-0.3.5/hanzo.egg-info/PKG-INFO +0 -76
- hanzo-0.3.5/hanzo.egg-info/SOURCES.txt +0 -31
- hanzo-0.3.5/hanzo.egg-info/dependency_links.txt +0 -1
- hanzo-0.3.5/hanzo.egg-info/entry_points.txt +0 -2
- hanzo-0.3.5/hanzo.egg-info/requires.txt +0 -8
- hanzo-0.3.5/hanzo.egg-info/top_level.txt +0 -1
- hanzo-0.3.5/setup.cfg +0 -4
- hanzo-0.3.5/setup.py +0 -80
hanzo-0.3.7/.gitignore
ADDED
hanzo-0.3.7/PKG-INFO
ADDED
```diff
@@ -0,0 +1,138 @@
+Metadata-Version: 2.4
+Name: hanzo
+Version: 0.3.7
+Summary: Hanzo AI - Complete AI Infrastructure Platform with CLI, Router, MCP, and Agent Runtime
+Project-URL: Homepage, https://hanzo.ai
+Project-URL: Repository, https://github.com/hanzoai/python-sdk
+Project-URL: Documentation, https://docs.hanzo.ai/cli
+Project-URL: Bug Tracker, https://github.com/hanzoai/python-sdk/issues
+Author-email: Hanzo AI <dev@hanzo.ai>
+Keywords: agents,ai,cli,hanzo,llm,local-ai,mcp,private-ai
+Classifier: Development Status :: 4 - Beta
+Classifier: Environment :: Console
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Requires-Python: >=3.8
+Requires-Dist: click>=8.1.0
+Requires-Dist: hanzo-net>=0.1.12
+Requires-Dist: httpx>=0.23.0
+Requires-Dist: prompt-toolkit>=3.0.0
+Requires-Dist: pydantic>=2.0.0
+Requires-Dist: pyyaml>=6.0
+Requires-Dist: rich>=13.0.0
+Requires-Dist: typer>=0.9.0
+Provides-Extra: agents
+Requires-Dist: hanzo-agents>=0.1.0; extra == 'agents'
+Requires-Dist: hanzo-network>=0.1.2; extra == 'agents'
+Provides-Extra: ai
+Requires-Dist: hanzoai>=1.0.0; extra == 'ai'
+Provides-Extra: all
+Requires-Dist: hanzo-aci>=0.2.8; extra == 'all'
+Requires-Dist: hanzo-agents>=0.1.0; extra == 'all'
+Requires-Dist: hanzo-mcp>=0.7.0; extra == 'all'
+Requires-Dist: hanzo-memory>=1.0.0; extra == 'all'
+Requires-Dist: hanzo-network>=0.1.2; extra == 'all'
+Requires-Dist: hanzo-repl>=0.1.0; extra == 'all'
+Requires-Dist: hanzoai>=1.0.0; extra == 'all'
+Provides-Extra: dev
+Requires-Dist: hanzo-aci>=0.2.8; extra == 'dev'
+Provides-Extra: mcp
+Requires-Dist: hanzo-mcp>=0.7.0; extra == 'mcp'
+Provides-Extra: repl
+Requires-Dist: hanzo-repl>=0.1.0; extra == 'repl'
+Provides-Extra: router
+Description-Content-Type: text/markdown
+
+# Hanzo AI - Complete AI Infrastructure Platform
+
+The main SDK for the Hanzo AI ecosystem, providing unified access to all Hanzo tools and services.
+
+## Installation
+
+```bash
+# Install base package with CLI
+pip install hanzo
+
+# Install with all components
+pip install hanzo[all]
+
+# Install specific components
+pip install hanzo[ai]      # AI SDK (same as standalone hanzoai package)
+pip install hanzo[router]  # LLM gateway router (replaces litellm)
+pip install hanzo[mcp]     # Model Context Protocol server
+pip install hanzo[agents]  # Agent runtime and orchestration
+pip install hanzo[repl]    # Interactive REPL with AI chat
+```
+
+## Features
+
+- **Unified LLM Gateway**: Use `hanzo.router` instead of litellm for 100+ LLM providers
+- **MCP Integration**: Full Model Context Protocol support for AI tools
+- **Agent Runtime**: Build and deploy AI agents with the agent framework
+- **Interactive REPL**: Chat with AI models directly from the command line
+- **Complete SDK**: Import all Hanzo components from a single package
+
+## Quick Start
+
+### Command Line
+```bash
+# Main CLI
+hanzo --help
+
+# Start MCP server
+hanzo-mcp
+
+# Interactive AI chat
+hanzo-ai
+hanzo-chat
+
+# REPL interface
+hanzo-repl
+```
+
+### Python SDK
+```python
+import hanzo
+
+# Use router for LLM calls (replaces litellm)
+from hanzo import router
+response = router.completion(
+    model="gpt-4",
+    messages=[{"role": "user", "content": "Hello!"}]
+)
+
+# Use agents
+from hanzo import Agent, Network
+agent = Agent(name="assistant")
+
+# Use MCP tools
+from hanzo import Tool, MCPServer
+
+# Access AI SDK
+from hanzo import Client
+client = Client(api_key="...")
+```
+
+## Components
+
+- **hanzo.router**: Unified LLM gateway (replaces litellm)
+- **hanzo.mcp**: Model Context Protocol server and tools
+- **hanzo.agents**: Agent runtime and orchestration
+- **hanzo.memory**: Memory systems for agents
+- **hanzo.Client**: Main AI SDK client
+
+## Documentation
+
+- [Hanzo AI Docs](https://docs.hanzo.ai)
+- [Router Documentation](https://docs.hanzo.ai/router)
+- [MCP Documentation](https://docs.hanzo.ai/mcp)
+- [Agent Documentation](https://docs.hanzo.ai/agents)
```
hanzo-0.3.7/README.md
ADDED
```diff
@@ -0,0 +1,84 @@
+# Hanzo AI - Complete AI Infrastructure Platform
+
+The main SDK for the Hanzo AI ecosystem, providing unified access to all Hanzo tools and services.
+
+## Installation
+
+```bash
+# Install base package with CLI
+pip install hanzo
+
+# Install with all components
+pip install hanzo[all]
+
+# Install specific components
+pip install hanzo[ai]      # AI SDK (same as standalone hanzoai package)
+pip install hanzo[router]  # LLM gateway router (replaces litellm)
+pip install hanzo[mcp]     # Model Context Protocol server
+pip install hanzo[agents]  # Agent runtime and orchestration
+pip install hanzo[repl]    # Interactive REPL with AI chat
+```
+
+## Features
+
+- **Unified LLM Gateway**: Use `hanzo.router` instead of litellm for 100+ LLM providers
+- **MCP Integration**: Full Model Context Protocol support for AI tools
+- **Agent Runtime**: Build and deploy AI agents with the agent framework
+- **Interactive REPL**: Chat with AI models directly from the command line
+- **Complete SDK**: Import all Hanzo components from a single package
+
+## Quick Start
+
+### Command Line
+```bash
+# Main CLI
+hanzo --help
+
+# Start MCP server
+hanzo-mcp
+
+# Interactive AI chat
+hanzo-ai
+hanzo-chat
+
+# REPL interface
+hanzo-repl
+```
+
+### Python SDK
+```python
+import hanzo
+
+# Use router for LLM calls (replaces litellm)
+from hanzo import router
+response = router.completion(
+    model="gpt-4",
+    messages=[{"role": "user", "content": "Hello!"}]
+)
+
+# Use agents
+from hanzo import Agent, Network
+agent = Agent(name="assistant")
+
+# Use MCP tools
+from hanzo import Tool, MCPServer
+
+# Access AI SDK
+from hanzo import Client
+client = Client(api_key="...")
+```
+
+## Components
+
+- **hanzo.router**: Unified LLM gateway (replaces litellm)
+- **hanzo.mcp**: Model Context Protocol server and tools
+- **hanzo.agents**: Agent runtime and orchestration
+- **hanzo.memory**: Memory systems for agents
+- **hanzo.Client**: Main AI SDK client
+
+## Documentation
+
+- [Hanzo AI Docs](https://docs.hanzo.ai)
+- [Router Documentation](https://docs.hanzo.ai/router)
+- [MCP Documentation](https://docs.hanzo.ai/mcp)
+- [Agent Documentation](https://docs.hanzo.ai/agents)
```
hanzo-0.3.7/pyproject.toml
ADDED
```diff
@@ -0,0 +1,92 @@
+[project]
+name = "hanzo"
+version = "0.3.7"
+description = "Hanzo AI - Complete AI Infrastructure Platform with CLI, Router, MCP, and Agent Runtime"
+authors = [
+    {name = "Hanzo AI", email = "dev@hanzo.ai"},
+]
+dependencies = [
+    "click>=8.1.0",
+    "rich>=13.0.0",
+    "typer>=0.9.0",
+    "prompt-toolkit>=3.0.0",
+    "httpx>=0.23.0",
+    "pydantic>=2.0.0",
+    "pyyaml>=6.0",
+    "hanzo-net>=0.1.12",
+]
+readme = "README.md"
+requires-python = ">= 3.8"
+keywords = ["ai", "cli", "hanzo", "agents", "llm", "mcp", "local-ai", "private-ai"]
+classifiers = [
+    "Development Status :: 4 - Beta",
+    "Environment :: Console",
+    "Intended Audience :: Developers",
+    "License :: OSI Approved :: Apache Software License",
+    "Operating System :: OS Independent",
+    "Programming Language :: Python :: 3",
+    "Programming Language :: Python :: 3.8",
+    "Programming Language :: Python :: 3.9",
+    "Programming Language :: Python :: 3.10",
+    "Programming Language :: Python :: 3.11",
+    "Programming Language :: Python :: 3.12",
+    "Topic :: Software Development :: Libraries :: Python Modules",
+    "Topic :: Scientific/Engineering :: Artificial Intelligence",
+]
+
+[project.scripts]
+hanzo = "hanzo.cli:main"
+hanzo-mcp = "hanzo.mcp_server:main"
+hanzo-ai = "hanzo.repl:ai_chat"
+hanzo-chat = "hanzo.repl:ai_chat"
+hanzo-repl = "hanzo.repl:repl_main"
+
+[project.optional-dependencies]
+all = [
+    "hanzoai>=1.0.0",
+    "hanzo-mcp>=0.7.0",
+    "hanzo-agents>=0.1.0",
+    "hanzo-network>=0.1.2",
+    "hanzo-repl>=0.1.0",
+    "hanzo-memory>=1.0.0",
+    # "hanzo-router>=1.74.3", # TODO: Publish hanzo-router to PyPI
+    "hanzo-aci>=0.2.8",
+]
+ai = [
+    "hanzoai>=1.0.0", # This provides the same as standalone hanzoai package
+]
+router = [
+    # "hanzo-router>=1.74.3", # TODO: Publish hanzo-router to PyPI
+]
+mcp = [
+    "hanzo-mcp>=0.7.0",
+]
+agents = [
+    "hanzo-agents>=0.1.0",
+    "hanzo-network>=0.1.2",
+]
+dev = [
+    "hanzo-aci>=0.2.8",
+]
+repl = [
+    "hanzo-repl>=0.1.0",
+    # "hanzo-router>=1.74.3", # TODO: For AI chat functionality - publish to PyPI
+]
+
+[project.urls]
+Homepage = "https://hanzo.ai"
+Repository = "https://github.com/hanzoai/python-sdk"
+Documentation = "https://docs.hanzo.ai/cli"
+"Bug Tracker" = "https://github.com/hanzoai/python-sdk/issues"
+
+[build-system]
+requires = ["hatchling"]
+build-backend = "hatchling.build"
+
+[tool.hatch.build]
+include = [
+    "src/hanzo",
+]
+
+[tool.hatch.build.targets.wheel]
+packages = ["src/hanzo"]
```
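The `[project.scripts]` table above is what turns `pip install hanzo` into the `hanzo`, `hanzo-mcp`, `hanzo-ai`, `hanzo-chat`, and `hanzo-repl` commands. A minimal sketch of inspecting those registered entry points after installation, assuming Python 3.10+ for the `group=` selection API (the script itself is illustrative and not part of the package):

```python
# Illustrative only: list the console scripts registered by hanzo's
# [project.scripts] table once the package is installed.
from importlib.metadata import entry_points

for ep in entry_points(group="console_scripts"):  # group= selection needs Python 3.10+
    if ep.value.startswith("hanzo."):
        # e.g. "hanzo -> hanzo.cli:main", "hanzo-repl -> hanzo.repl:repl_main"
        print(f"{ep.name} -> {ep.value}")
```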
{hanzo-0.3.5 → hanzo-0.3.7/src}/hanzo/cli.py
CHANGED
```diff
@@ -1,15 +1,26 @@
 """Main CLI entry point for Hanzo."""
 
-import asyncio
 import sys
+import asyncio
 from typing import Optional
 
 import click
 from rich.console import Console
 
-from .commands import
-
+from .commands import (
+    mcp,
+    auth,
+    chat,
+    repl,
+    agent,
+    miner,
+    tools,
+    config,
+    cluster,
+    network,
+)
 from .utils.output import console
+from .interactive.repl import HanzoREPL
 
 # Version
 __version__ = "0.2.10"
@@ -23,7 +34,7 @@ __version__ = "0.2.10"
 @click.pass_context
 def cli(ctx, verbose: bool, json: bool, config: Optional[str]):
     """Hanzo AI - Unified CLI for local, private, and free AI.
-
+
     Run without arguments to enter interactive mode.
     """
     # Ensure context object exists
@@ -32,14 +43,15 @@ def cli(ctx, verbose: bool, json: bool, config: Optional[str]):
     ctx.obj["json"] = json
     ctx.obj["config"] = config
     ctx.obj["console"] = console
-
+
     # If no subcommand, enter interactive mode or start compute node
     if ctx.invoked_subcommand is None:
         # Check if we should start as a compute node
         import os
+
         if os.environ.get("HANZO_COMPUTE_NODE") == "1":
             # Start as a compute node
-
+
             asyncio.run(start_compute_node(ctx))
         else:
             # Enter interactive REPL mode
@@ -90,9 +102,15 @@ def serve(ctx, name: str, port: int):
 
 @cli.command()
 @click.option("--name", "-n", help="Node name (auto-generated if not provided)")
-@click.option(
-
-
+@click.option(
+    "--port", "-p", default=52415, help="Node port (default: 52415 for hanzo/net)"
+)
+@click.option(
+    "--network", default="local", help="Network to join (mainnet/testnet/local)"
+)
+@click.option(
+    "--models", "-m", multiple=True, help="Models to serve (e.g., llama-3.2-3b)"
+)
 @click.option("--max-jobs", type=int, default=10, help="Max concurrent jobs")
 @click.pass_context
 def net(ctx, name: str, port: int, network: str, models: tuple, max_jobs: int):
@@ -102,9 +120,15 @@ def net(ctx, name: str, port: int, network: str, models: tuple, max_jobs: int):
 
 @cli.command()
 @click.option("--name", "-n", help="Node name (auto-generated if not provided)")
-@click.option(
-
-
+@click.option(
+    "--port", "-p", default=52415, help="Node port (default: 52415 for hanzo/net)"
+)
+@click.option(
+    "--network", default="local", help="Network to join (mainnet/testnet/local)"
+)
+@click.option(
+    "--models", "-m", multiple=True, help="Models to serve (e.g., llama-3.2-3b)"
+)
 @click.option("--max-jobs", type=int, default=10, help="Max concurrent jobs")
 @click.pass_context
 def node(ctx, name: str, port: int, network: str, models: tuple, max_jobs: int):
@@ -112,21 +136,26 @@ def node(ctx, name: str, port: int, network: str, models: tuple, max_jobs: int):
     asyncio.run(start_compute_node(ctx, name, port, network, models, max_jobs))
 
 
-async def start_compute_node(
-
-
+async def start_compute_node(
+    ctx,
+    name: str = None,
+    port: int = 52415,
+    network: str = "mainnet",
+    models: tuple = None,
+    max_jobs: int = 10,
+):
     """Start this instance as a compute node using hanzo/net."""
-    from .utils.net_check import check_net_installation
-
+    from .utils.net_check import check_net_installation
+
     console = ctx.obj.get("console", Console())
-
+
     console.print("[bold cyan]Starting Hanzo Net Compute Node[/bold cyan]")
     console.print(f"Network: {network}")
     console.print(f"Port: {port}")
-
+
     # Check hanzo/net availability
     is_available, net_path, python_exe = check_net_installation()
-
+
     if not is_available:
         console.print("[red]Error:[/red] hanzo-net is not installed")
         console.print("\nTo install hanzo-net from PyPI:")
@@ -135,23 +164,23 @@ async def start_compute_node(ctx, name: str = None, port: int = 52415,
         console.print(" git clone https://github.com/hanzoai/net.git ~/work/hanzo/net")
         console.print(" cd ~/work/hanzo/net && pip install -e .")
         return
-
+
     try:
-        import subprocess
-        import sys
         import os
-
+        import sys
+        import subprocess
+
         # Use the checked net_path and python_exe
         if not net_path:
             # net is installed as a package
             console.print("[green]✓[/green] Using installed hanzo/net")
-
+
             # Set up sys.argv for net's argparse
             original_argv = sys.argv.copy()
             try:
                 # Build argv for net
                 sys.argv = ["hanzo-net"]  # Program name
-
+
                 # Add options
                 if port != 52415:
                     sys.argv.extend(["--chatgpt-api-port", str(port)])
@@ -159,20 +188,22 @@ async def start_compute_node(ctx, name: str = None, port: int = 52415,
                     sys.argv.extend(["--node-id", name])
                 if network != "local":
                     sys.argv.extend(["--discovery-module", network])
-                if models
+                if models:
                     sys.argv.extend(["--default-model", models[0]])
-
+
                 # Import and run net
                 from net.main import run as net_run
-
+
                 console.print(f"\n[green]✓[/green] Node initialized")
                 console.print(f" Port: {port}")
-                console.print(
+                console.print(
+                    f" Models: {', '.join(models) if models else 'auto-detect'}"
+                )
                 console.print("\n[bold green]Hanzo Net is running![/bold green]")
                 console.print("WebUI: http://localhost:52415")
                 console.print("API: http://localhost:52415/v1/chat/completions")
                 console.print("\nPress Ctrl+C to stop\n")
-
+
                 # Run net
                 await net_run()
             finally:
@@ -184,28 +215,32 @@ async def start_compute_node(ctx, name: str = None, port: int = 52415,
                 console.print(f"[green]✓[/green] Using hanzo/net venv")
             else:
                 console.print("[yellow]⚠[/yellow] Using system Python")
-
+
             # Change to net directory and run
             original_cwd = os.getcwd()
             try:
                 os.chdir(net_path)
-
+
                 # Set up environment
                 env = os.environ.copy()
                 if models:
                     env["NET_MODELS"] = ",".join(models)
                 if name:
                     env["NET_NODE_NAME"] = name
-                env["PYTHONPATH"] =
-
+                env["PYTHONPATH"] = (
+                    os.path.join(net_path, "src") + ":" + env.get("PYTHONPATH", "")
+                )
+
                 console.print(f"\n[green]✓[/green] Starting net node")
                 console.print(f" Port: {port}")
-                console.print(
+                console.print(
+                    f" Models: {', '.join(models) if models else 'auto-detect'}"
+                )
                 console.print("\n[bold green]Hanzo Net is running![/bold green]")
                 console.print("WebUI: http://localhost:52415")
                 console.print("API: http://localhost:52415/v1/chat/completions")
                 console.print("\nPress Ctrl+C to stop\n")
-
+
                 # Build command line args
                 cmd_args = [python_exe, "-m", "net.main"]
                 if port != 52415:
@@ -214,22 +249,20 @@ async def start_compute_node(ctx, name: str = None, port: int = 52415,
                     cmd_args.extend(["--node-id", name])
                 if network != "local":
                     cmd_args.extend(["--discovery-module", network])
-                if models
+                if models:
                     cmd_args.extend(["--default-model", models[0]])
-
+
                 # Run net command with detected python
-                process = subprocess.run(
-
-                    env=env,
-                    check=False
-                )
-
+                process = subprocess.run(cmd_args, env=env, check=False)
+
                 if process.returncode != 0 and process.returncode != -2:  # -2 is Ctrl+C
-                    console.print(
-
+                    console.print(
+                        f"[red]Net exited with code {process.returncode}[/red]"
+                    )
+
             finally:
                 os.chdir(original_cwd)
-
+
     except KeyboardInterrupt:
         console.print("\n[yellow]Shutting down node...[/yellow]")
         console.print("[green]✓[/green] Node stopped")
@@ -242,6 +275,7 @@ async def start_compute_node(ctx, name: str = None, port: int = 52415,
 def dashboard(ctx):
     """Open interactive dashboard."""
     from .interactive.dashboard import run_dashboard
+
     run_dashboard()
 
 
@@ -258,4 +292,4 @@ def main():
 
 
 if __name__ == "__main__":
-    main()
+    main()
```