openbee 0.1.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
openbee-0.1.0/PKG-INFO ADDED
@@ -0,0 +1,70 @@
1
+ Metadata-Version: 2.4
2
+ Name: openbee
3
+ Version: 0.1.0
4
+ Summary: Python CLI for basic OpenBee commands
5
+ Author: openbee
6
+ License-Expression: MIT
7
+ Project-URL: Homepage, https://github.com/sanpiekankan/openbee
8
+ Project-URL: Repository, https://github.com/sanpiekankan/openbee
9
+ Keywords: openbee,cli,ai,agent
10
+ Classifier: Programming Language :: Python :: 3
11
+ Classifier: Programming Language :: Python :: 3 :: Only
12
+ Classifier: Programming Language :: Python :: 3.9
13
+ Classifier: Programming Language :: Python :: 3.10
14
+ Classifier: Programming Language :: Python :: 3.11
15
+ Classifier: Programming Language :: Python :: 3.12
16
+ Classifier: Environment :: Console
17
+ Requires-Python: >=3.9
18
+ Description-Content-Type: text/markdown
19
+
20
+ # OpenBee Python CLI
21
+
22
+ OpenBee Python CLI 是 OpenBee 的 Python 版本基础命令行工具,提供最小可用能力:
23
+
24
+ - 查看内置 Bee 角色
25
+ - 配置 LLM 参数(API Key、模型、Base URL、温度)
26
+ - 向指定角色发送任务并获取回复
27
+
28
+ ## 安装
29
+
30
+ ```bash
31
+ pip install openbee
32
+ ```
33
+
34
+ ## 快速开始
35
+
36
+ ### 1) 查看角色
37
+
38
+ ```bash
39
+ openbee list
40
+ ```
41
+
42
+ ### 2) 配置模型
43
+
44
+ ```bash
45
+ openbee config --api-key <YOUR_API_KEY> --model gpt-4o
46
+ ```
47
+
48
+ ### 3) 发起任务
49
+
50
+ ```bash
51
+ openbee ask worker "帮我规划一个后端服务结构"
52
+ ```
53
+
54
+ ## 配置说明
55
+
56
+ 默认配置文件位置:
57
+
58
+ ```text
59
+ ~/.openbee/config.json
60
+ ```
61
+
62
+ 你也可以通过环境变量覆盖配置目录:
63
+
64
+ ```bash
65
+ export OPENBEE_CONFIG_HOME=/path/to/custom/config
66
+ ```
67
+
68
+ ## 许可证
69
+
70
+ MIT
@@ -0,0 +1,51 @@
1
+ # OpenBee Python CLI
2
+
3
+ OpenBee Python CLI 是 OpenBee 的 Python 版本基础命令行工具,提供最小可用能力:
4
+
5
+ - 查看内置 Bee 角色
6
+ - 配置 LLM 参数(API Key、模型、Base URL、温度)
7
+ - 向指定角色发送任务并获取回复
8
+
9
+ ## 安装
10
+
11
+ ```bash
12
+ pip install openbee
13
+ ```
14
+
15
+ ## 快速开始
16
+
17
+ ### 1) 查看角色
18
+
19
+ ```bash
20
+ openbee list
21
+ ```
22
+
23
+ ### 2) 配置模型
24
+
25
+ ```bash
26
+ openbee config --api-key <YOUR_API_KEY> --model gpt-4o
27
+ ```
28
+
29
+ ### 3) 发起任务
30
+
31
+ ```bash
32
+ openbee ask worker "帮我规划一个后端服务结构"
33
+ ```
34
+
35
+ ## 配置说明
36
+
37
+ 默认配置文件位置:
38
+
39
+ ```text
40
+ ~/.openbee/config.json
41
+ ```
42
+
43
+ 你也可以通过环境变量覆盖配置目录:
44
+
45
+ ```bash
46
+ export OPENBEE_CONFIG_HOME=/path/to/custom/config
47
+ ```
48
+
49
+ ## 许可证
50
+
51
+ MIT
@@ -0,0 +1,38 @@
1
+ [build-system]
2
+ requires = ["setuptools>=68", "wheel"]
3
+ build-backend = "setuptools.build_meta"
4
+
5
+ [project]
6
+ name = "openbee"
7
+ version = "0.1.0"
8
+ description = "Python CLI for basic OpenBee commands"
9
+ readme = "README.md"
10
+ requires-python = ">=3.9"
11
+ license = "MIT"
12
+ authors = [{ name = "openbee" }]
13
+ keywords = ["openbee", "cli", "ai", "agent"]
14
+ classifiers = [
15
+ "Programming Language :: Python :: 3",
16
+ "Programming Language :: Python :: 3 :: Only",
17
+ "Programming Language :: Python :: 3.9",
18
+ "Programming Language :: Python :: 3.10",
19
+ "Programming Language :: Python :: 3.11",
20
+ "Programming Language :: Python :: 3.12",
21
+ "Environment :: Console",
22
+ ]
23
+
24
+ [project.urls]
25
+ Homepage = "https://github.com/sanpiekankan/openbee"
26
+ Repository = "https://github.com/sanpiekankan/openbee"
27
+
28
+ [project.scripts]
29
+ openbee = "openbee_py.cli:main"
30
+
31
+ [tool.setuptools]
32
+ package-dir = {"" = "src"}
33
+
34
+ [tool.setuptools.packages.find]
35
+ where = ["src"]
36
+
37
+ [tool.pytest.ini_options]
38
+ testpaths = ["tests"]
@@ -0,0 +1,4 @@
1
+ [egg_info]
2
+ tag_build =
3
+ tag_date = 0
4
+
@@ -0,0 +1,70 @@
1
+ Metadata-Version: 2.4
2
+ Name: openbee
3
+ Version: 0.1.0
4
+ Summary: Python CLI for basic OpenBee commands
5
+ Author: openbee
6
+ License-Expression: MIT
7
+ Project-URL: Homepage, https://github.com/sanpiekankan/openbee
8
+ Project-URL: Repository, https://github.com/sanpiekankan/openbee
9
+ Keywords: openbee,cli,ai,agent
10
+ Classifier: Programming Language :: Python :: 3
11
+ Classifier: Programming Language :: Python :: 3 :: Only
12
+ Classifier: Programming Language :: Python :: 3.9
13
+ Classifier: Programming Language :: Python :: 3.10
14
+ Classifier: Programming Language :: Python :: 3.11
15
+ Classifier: Programming Language :: Python :: 3.12
16
+ Classifier: Environment :: Console
17
+ Requires-Python: >=3.9
18
+ Description-Content-Type: text/markdown
19
+
20
+ # OpenBee Python CLI
21
+
22
+ OpenBee Python CLI 是 OpenBee 的 Python 版本基础命令行工具,提供最小可用能力:
23
+
24
+ - 查看内置 Bee 角色
25
+ - 配置 LLM 参数(API Key、模型、Base URL、温度)
26
+ - 向指定角色发送任务并获取回复
27
+
28
+ ## 安装
29
+
30
+ ```bash
31
+ pip install openbee
32
+ ```
33
+
34
+ ## 快速开始
35
+
36
+ ### 1) 查看角色
37
+
38
+ ```bash
39
+ openbee list
40
+ ```
41
+
42
+ ### 2) 配置模型
43
+
44
+ ```bash
45
+ openbee config --api-key <YOUR_API_KEY> --model gpt-4o
46
+ ```
47
+
48
+ ### 3) 发起任务
49
+
50
+ ```bash
51
+ openbee ask worker "帮我规划一个后端服务结构"
52
+ ```
53
+
54
+ ## 配置说明
55
+
56
+ 默认配置文件位置:
57
+
58
+ ```text
59
+ ~/.openbee/config.json
60
+ ```
61
+
62
+ 你也可以通过环境变量覆盖配置目录:
63
+
64
+ ```bash
65
+ export OPENBEE_CONFIG_HOME=/path/to/custom/config
66
+ ```
67
+
68
+ ## 许可证
69
+
70
+ MIT
@@ -0,0 +1,13 @@
1
+ README.md
2
+ pyproject.toml
3
+ src/openbee.egg-info/PKG-INFO
4
+ src/openbee.egg-info/SOURCES.txt
5
+ src/openbee.egg-info/dependency_links.txt
6
+ src/openbee.egg-info/entry_points.txt
7
+ src/openbee.egg-info/top_level.txt
8
+ src/openbee_py/__init__.py
9
+ src/openbee_py/__main__.py
10
+ src/openbee_py/cli.py
11
+ src/openbee_py/config.py
12
+ src/openbee_py/core.py
13
+ tests/test_cli.py
@@ -0,0 +1,2 @@
1
+ [console_scripts]
2
+ openbee = openbee_py.cli:main
@@ -0,0 +1 @@
1
+ openbee_py
@@ -0,0 +1,3 @@
1
"""OpenBee Python CLI package metadata."""

__version__ = "0.1.0"

__all__ = ["__version__"]
@@ -0,0 +1,9 @@
1
+ from __future__ import annotations
2
+
3
+ import sys
4
+
5
+ from .cli import main
6
+
7
+
8
+ if __name__ == "__main__":
9
+ raise SystemExit(main(sys.argv[1:]))
@@ -0,0 +1,121 @@
1
+ from __future__ import annotations
2
+
3
+ import argparse
4
+ import json
5
+ import sys
6
+ from typing import Sequence
7
+
8
+ from .config import load_config, update_llm_config
9
+ from .core import ask_openbee, get_role, list_roles
10
+
11
+
12
+ def _build_parser() -> argparse.ArgumentParser:
13
+ parser = argparse.ArgumentParser(
14
+ prog="openbee",
15
+ description="OpenBee Python CLI (basic commands)",
16
+ )
17
+ subparsers = parser.add_subparsers(dest="command")
18
+
19
+ subparsers.add_parser("list", help="List available bee roles")
20
+
21
+ config_parser = subparsers.add_parser("config", help="Show or update LLM config")
22
+ config_parser.add_argument("--provider", help="LLM provider name")
23
+ config_parser.add_argument("--api-key", help="LLM API key")
24
+ config_parser.add_argument("--model", help="LLM model name")
25
+ config_parser.add_argument("--base-url", help="OpenAI-compatible base URL")
26
+ config_parser.add_argument("--temperature", type=float, help="Sampling temperature")
27
+
28
+ ask_parser = subparsers.add_parser("ask", help="Ask a role to perform a task")
29
+ ask_parser.add_argument("role", help="Role id, e.g. worker")
30
+ ask_parser.add_argument("task", nargs="+", help="Task text")
31
+ ask_parser.add_argument("--api-key", help="Override API key")
32
+ ask_parser.add_argument("--model", help="Override model")
33
+ ask_parser.add_argument("--base-url", help="Override base URL")
34
+ ask_parser.add_argument("--temperature", type=float, help="Override temperature")
35
+
36
+ return parser
37
+
38
+
39
def _run_list() -> int:
    """Print every built-in role as ``Name (id)`` plus its description line."""
    for entry in list_roles():
        print(f'{entry["name"]} ({entry["id"]})')
        print(f' {entry["description"]}')
    return 0
45
+
46
+
47
def _run_config(args: argparse.Namespace) -> int:
    """Apply any LLM settings given on the command line, then print the config.

    With no option flags, just shows the currently effective configuration.
    Always returns 0.
    """
    updates = {
        "provider": args.provider,
        "api_key": args.api_key,
        "model": args.model,
        "base_url": args.base_url,
        "temperature": args.temperature,
    }
    if any(value is not None for value in updates.values()):
        config = update_llm_config(**updates)
        print("Configuration updated.")
    else:
        config = load_config()
    print(json.dumps(config, ensure_ascii=False, indent=2))
    return 0
73
+
74
+
75
def _run_ask(args: argparse.Namespace) -> int:
    """Send a task to the selected role and print the model's reply.

    CLI overrides take precedence over the stored config. Returns 1 (with a
    message on stderr) for an unknown role, a missing API key, or a request
    failure; 0 on success.
    """
    role = get_role(args.role)
    if role is None:
        print(f'Error: role "{args.role}" not found.', file=sys.stderr)
        return 1

    llm = load_config().get("llm", {})
    api_key = args.api_key or llm.get("api_key", "")
    if not api_key:
        print('Error: API key missing. Run "openbee config --api-key <key>" first.', file=sys.stderr)
        return 1

    model = args.model or llm.get("model", "gpt-4o")
    base_url = args.base_url or llm.get("base_url", "https://api.openai.com/v1")
    if args.temperature is None:
        # Stored value may come from hand-edited JSON, so coerce to float.
        temperature = float(llm.get("temperature", 0.7))
    else:
        temperature = args.temperature

    try:
        content = ask_openbee(
            role=role,
            task=" ".join(args.task),
            api_key=api_key,
            model=model,
            base_url=base_url,
            temperature=temperature,
        )
    except RuntimeError as exc:
        print(f"Error: {exc}", file=sys.stderr)
        return 1

    print(content)
    return 0
107
+
108
+
109
def main(argv: Sequence[str] | None = None) -> int:
    """CLI entry point: parse *argv* and dispatch to the chosen subcommand.

    With no subcommand, prints usage and returns 0.
    """
    parser = _build_parser()
    args = parser.parse_args(argv)

    if args.command == "list":
        return _run_list()
    handlers = {"config": _run_config, "ask": _run_ask}
    handler = handlers.get(args.command)
    if handler is not None:
        return handler(args)

    # No subcommand supplied: show usage rather than failing.
    parser.print_help()
    return 0
@@ -0,0 +1,60 @@
1
+ from __future__ import annotations
2
+
3
+ import copy
4
+ import json
5
+ import os
6
+ from pathlib import Path
7
+ from typing import Any
8
+
9
+
10
# Fallback configuration used when no config file exists on disk;
# load_config() deep-copies this and merges the user's file over it one
# level deep. Key order is preserved in the JSON shown by `openbee config`.
DEFAULT_CONFIG: dict[str, Any] = {
    "llm": {
        "provider": "openai",  # provider label; requests target an OpenAI-compatible API
        "api_key": "",  # empty means "not configured yet" — `ask` refuses to run
        "model": "gpt-4o",
        "base_url": "https://api.openai.com/v1",
        "temperature": 0.7,
    }
}
19
+
20
+
21
def get_config_dir() -> Path:
    """Return the configuration directory.

    Honours the ``OPENBEE_CONFIG_HOME`` environment variable (expanded and
    resolved); otherwise defaults to ``~/.openbee``.
    """
    override = os.getenv("OPENBEE_CONFIG_HOME")
    if not override:
        return Path.home() / ".openbee"
    return Path(override).expanduser().resolve()
26
+
27
+
28
def get_config_path() -> Path:
    """Return the full path of the JSON config file inside the config dir."""
    return get_config_dir().joinpath("config.json")
30
+
31
+
32
def load_config() -> dict[str, Any]:
    """Load the user config layered over ``DEFAULT_CONFIG``.

    A missing file yields a deep copy of the defaults. For top-level keys
    whose value is a dict on both sides, the user's entries are merged
    key-by-key; any other value replaces the default wholesale.
    """
    merged = copy.deepcopy(DEFAULT_CONFIG)
    path = get_config_path()
    if not path.exists():
        return merged
    data = json.loads(path.read_text(encoding="utf-8"))
    if isinstance(data, dict):
        for key, value in data.items():
            base = merged.get(key)
            if isinstance(value, dict) and isinstance(base, dict):
                base.update(value)
            else:
                merged[key] = value
    return merged
45
+
46
+
47
def save_config(config: dict[str, Any]) -> None:
    """Write *config* as pretty-printed JSON, creating the config dir if needed."""
    target = get_config_path()
    target.parent.mkdir(parents=True, exist_ok=True)
    serialized = json.dumps(config, ensure_ascii=False, indent=2)
    target.write_text(serialized, encoding="utf-8")
51
+
52
+
53
def update_llm_config(**kwargs: Any) -> dict[str, Any]:
    """Merge non-None keyword values into the ``llm`` section and persist.

    Returns the full updated configuration dict.
    """
    config = load_config()
    section = config.setdefault("llm", {})
    section.update({key: value for key, value in kwargs.items() if value is not None})
    save_config(config)
    return config
@@ -0,0 +1,95 @@
1
+ from __future__ import annotations
2
+
3
+ import json
4
+ import urllib.error
5
+ import urllib.request
6
+ from typing import Any
7
+
8
+
9
# Registry of built-in bee personas. Each entry carries:
#   id            - stable identifier typed on the CLI (``openbee ask <id> ...``)
#   name          - human-readable display name
#   description   - one-line summary shown by ``openbee list``
#   system_prompt - system message sent ahead of the user's task
ROLES: list[dict[str, str]] = [
    {
        "id": "worker",
        "name": "Worker Bee",
        "description": "General-purpose assistant for everyday tasks.",
        "system_prompt": (
            "You are a diligent Worker Bee in the OpenBee Hive. "
            "Assist users with general tasks efficiently and precisely."
        ),
    },
    {
        "id": "researcher",
        "name": "Researcher Bee",
        "description": "Specialized assistant for research and information synthesis.",
        "system_prompt": (
            "You are a Researcher Bee in the OpenBee Hive. "
            "Provide structured and accurate research-focused answers."
        ),
    },
    {
        "id": "architect",
        "name": "Architect Bee",
        "description": "Specialized assistant for system design and planning.",
        "system_prompt": (
            "You are an Architect Bee in the OpenBee Hive. "
            "Design robust, maintainable solutions with clear trade-offs."
        ),
    },
]
38
+
39
+
40
def list_roles() -> list[dict[str, str]]:
    """Return the built-in roles.

    Returns a shallow copy so callers that sort, filter, or otherwise mutate
    the returned list cannot corrupt the module-level ``ROLES`` registry.
    The role dicts themselves are shared and must be treated as read-only.
    """
    return list(ROLES)
42
+
43
+
44
def get_role(role_id: str) -> dict[str, str] | None:
    """Return the role dict whose ``id`` equals *role_id*, or None if absent."""
    return next((entry for entry in ROLES if entry["id"] == role_id), None)
49
+
50
+
51
def ask_openbee(
    role: dict[str, str],
    task: str,
    api_key: str,
    model: str,
    base_url: str,
    temperature: float = 0.7,
    timeout: float = 60.0,
) -> str:
    """Send *task* to an OpenAI-compatible chat endpoint as the given role.

    Builds a two-message chat (the role's system prompt plus the user task),
    POSTs it to ``<base_url>/chat/completions`` with a bearer token, and
    returns the first choice's message content.

    Raises:
        RuntimeError: on HTTP errors, network failures, or a response body
            lacking a non-empty ``choices[0].message.content`` string.
    """
    url = f"{base_url.rstrip('/')}/chat/completions"
    body = json.dumps(
        {
            "model": model,
            "temperature": temperature,
            "messages": [
                {"role": "system", "content": role["system_prompt"]},
                {"role": "user", "content": task},
            ],
        }
    ).encode("utf-8")
    request = urllib.request.Request(
        url,
        data=body,
        headers={
            "Content-Type": "application/json",
            "Authorization": f"Bearer {api_key}",
        },
        method="POST",
    )
    try:
        with urllib.request.urlopen(request, timeout=timeout) as response:
            raw = response.read().decode("utf-8")
    except urllib.error.HTTPError as exc:
        # Include the server's error body, since it usually explains the 4xx/5xx.
        details = exc.read().decode("utf-8", errors="replace")
        raise RuntimeError(f"request failed: {exc.code} {details}") from exc
    except urllib.error.URLError as exc:
        raise RuntimeError(f"request failed: {exc.reason}") from exc

    parsed: dict[str, Any] = json.loads(raw)
    choices = parsed.get("choices")
    if not isinstance(choices, list) or not choices:
        raise RuntimeError("invalid response: missing choices")
    content = choices[0].get("message", {}).get("content")
    if not isinstance(content, str) or not content.strip():
        raise RuntimeError("invalid response: missing content")
    return content
@@ -0,0 +1,55 @@
1
+ from __future__ import annotations
2
+
3
+ import io
4
+ import os
5
+ import tempfile
6
+ import unittest
7
+ from contextlib import redirect_stdout
8
+ from unittest.mock import patch
9
+
10
+ from openbee_py import cli
11
+ from openbee_py.config import get_config_path, load_config, update_llm_config
12
+
13
+
14
class TestOpenBeePyCli(unittest.TestCase):
    """End-to-end tests for the openbee CLI entry point."""

    def test_list_command(self) -> None:
        captured = io.StringIO()
        with redirect_stdout(captured):
            exit_code = cli.main(["list"])
        self.assertEqual(exit_code, 0)
        self.assertIn("Worker Bee (worker)", captured.getvalue())

    def test_config_roundtrip(self) -> None:
        # Redirect the config dir into a temp directory so the test never
        # touches the user's real ~/.openbee.
        with tempfile.TemporaryDirectory() as config_home:
            with patch.dict(os.environ, {"OPENBEE_CONFIG_HOME": config_home}, clear=False):
                argv = ["config", "--api-key", "test-key", "--model", "gpt-4o-mini"]
                self.assertEqual(cli.main(argv), 0)
                stored = load_config()
                self.assertEqual(stored["llm"]["api_key"], "test-key")
                self.assertEqual(stored["llm"]["model"], "gpt-4o-mini")
                self.assertTrue(get_config_path().exists())

    def test_ask_command_uses_client(self) -> None:
        with tempfile.TemporaryDirectory() as config_home:
            with patch.dict(os.environ, {"OPENBEE_CONFIG_HOME": config_home}, clear=False):
                update_llm_config(api_key="k", model="m", base_url="https://api.openai.com/v1")
                # Stub the network call; only CLI plumbing is under test here.
                with patch("openbee_py.cli.ask_openbee", return_value="ok") as fake_ask:
                    captured = io.StringIO()
                    with redirect_stdout(captured):
                        exit_code = cli.main(["ask", "worker", "hello"])
                    self.assertEqual(exit_code, 0)
                    self.assertIn("ok", captured.getvalue())
                    fake_ask.assert_called_once()
52
+
53
+
54
# Allow running this file directly (python tests/test_cli.py) without pytest.
if __name__ == "__main__":
    unittest.main()