onellmclient 0.1.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,10 @@
1
+ {
2
+ "permissions": {
3
+ "allow": [
4
+ "Bash(.venv/bin/python:*)",
5
+ "Bash(.venv/bin/pytest tests/test_client.py::test_completion_tools -k anthropic -v -s)"
6
+ ],
7
+ "deny": [],
8
+ "ask": []
9
+ }
10
+ }
@@ -0,0 +1,15 @@
1
+ __pycache__/
2
+ *.pyc
3
+ *.pyo
4
+ *.pyd
5
+ *.egg-info/
6
+ dist/
7
+ build/
8
+ .pytest_cache/
9
+ .coverage
10
+ htmlcov/
11
+ .venv/
12
+ .uv/
13
+ .env
14
+ .mypy_cache/
15
+ .DS_Store
@@ -0,0 +1,120 @@
1
+ # CLAUDE.md
2
+
3
+ This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository.
4
+
5
+ ## Project Overview
6
+
7
+ Unified LLM Client - A Python library that provides a unified interface for OpenAI, Anthropic, and Gemini APIs. This allows applications to transparently switch between LLM providers without changing code.
8
+
9
+ **Key Design Principle**: Zero mandatory runtime dependencies. Each provider's SDK is optional and installed via extras.
10
+
11
+ ## Common Commands
12
+
13
+ ### Environment Setup
14
+ ```bash
15
+ # Create virtual environment and install package
16
+ uv venv
17
+ uv pip install -e .
18
+
19
+ # Install specific provider SDKs
20
+ uv pip install -e .[openai]
21
+ uv pip install -e .[anthropic]
22
+ uv pip install -e .[gemini]
23
+ uv pip install -e .[all]
24
+
25
+ # Install test dependencies
26
+ uv pip install -e .[test]
27
+ ```
28
+
29
+ ### Testing
30
+ ```bash
31
+ # Run all tests
32
+ pytest
33
+
34
+ # Run tests for a specific provider
35
+ pytest tests/test_client.py::test_completion_say_cow -k openai
36
+ pytest tests/test_client.py::test_completion_say_cow -k anthropic
37
+ pytest tests/test_client.py::test_completion_say_cow -k gemini
38
+
39
+ # Run a specific test function
40
+ pytest tests/test_client.py::test_completion_schema -v
41
+ ```
42
+
43
+ ### Building and Publishing
44
+ ```bash
45
+ # Build distribution packages
46
+ uv build
47
+
48
+ # Upload to TestPyPI
49
+ uv tool install twine
50
+ uv run twine upload --repository testpypi dist/*
51
+
52
+ # Upload to PyPI
53
+ uv run twine upload dist/*
54
+ ```
55
+
56
+ ## Architecture
57
+
58
+ ### Core Components
59
+
60
+ **`client.py`**: Main `Client` class that provides two methods:
61
+ - `completion()`: Single request/response interaction with provider
62
+ - `agent()`: Automatic tool call execution loop (up to 10 iterations)
63
+
64
+ Both methods accept unified parameters and return `UnifiedMessage` objects.
65
+
66
+ **`types.py`**: Pydantic models defining the unified message format:
67
+ - `UnifiedMessage`: Base message container with role and content
68
+ - `UnifiedTextMessageContent`: Text content blocks
69
+ - `UnifiedToolCallMessageContent`: Tool/function call requests
70
+ - `UnifiedToolResultMessageContent`: Tool execution results
71
+ - All types extend `UnifiedBaseModel` which adds `.get()` method for dict-like access
72
+
73
+ **`utils.py`**: Conversion utilities between unified format and provider-specific formats:
74
+ - `{provider}_messages()`: Convert `UnifiedMessage` → provider format
75
+ - `{provider}_response_convert()`: Convert provider response → `UnifiedMessage`
76
+ - `{provider}_tools()`: Convert unified tool definitions → provider format
77
+ - `openai_json_schema()`: Recursively adds `additionalProperties: false` for OpenAI strict mode
78
+
79
+ ### Key Architectural Patterns
80
+
81
+ 1. **Message Format Unification**: All providers use different message formats. The library normalizes them:
82
+ - OpenAI uses `response.output` with separate `message` and `function_call` types
83
+ - Anthropic uses `message.content` with `text` and `tool_use` blocks
84
+ - Gemini uses `content.parts` with `text` and `function_call` attributes
85
+
86
+ 2. **Parameter Translation**: Provider-specific parameters are translated:
87
+ - `temperature`: Anthropic uses 0-1 range (divided by 2), others use 0-2
88
+ - `instructions`: Maps to `instructions` (OpenAI), `system` (Anthropic), `system_instruction` (Gemini)
89
+ - `schema`: OpenAI uses native support, Anthropic uses system prompt injection, Gemini uses `response_schema`
90
+ - `tool_choice`: Unified `auto/none/required` maps to provider-specific values
91
+ - `reasoning_effort`: Maps to `reasoning.effort` (OpenAI), `thinking.budget_tokens` (Anthropic), `thinking_budget` (Gemini)
92
+
93
+ 3. **Tool Call Loop**: The `agent()` method automatically handles multi-turn tool execution by:
94
+ - Calling `completion()` with tools
95
+ - If response contains tool calls, execute handlers and append results to messages
96
+ - Repeat until no tool calls in response or max iterations (10) reached
97
+
98
+ 4. **Role Mapping**: Gemini uses `model` instead of `assistant`, converted in `gemini_messages()` and `gemini_response_convert()`
99
+
100
+ ## Development Constraints
101
+
102
+ - **Minimize implementation**: Only add features when needed
103
+ - **Preserve `.env`**: Never modify the environment file
104
+ - **Unit test coverage**: Only test modified code sections
105
+ - **Dependency versioning**: Use format `>=x.y.z,<X+1.0.0` (next major) to prevent breaking updates while allowing minor and patch releases
106
+ - **Provider isolation**: Each provider implementation in `src/onellmclient/provider/{provider}/` is independent
107
+
108
+ ## Testing Requirements
109
+
110
+ Tests require API keys in `.env`:
111
+ ```
112
+ OPENAI_API_KEY=...
113
+ OPENAI_API_BASE=...
114
+ ANTHROPIC_API_KEY=...
115
+ ANTHROPIC_API_BASE=...
116
+ GEMINI_API_KEY=...
117
+ GEMINI_API_BASE=...
118
+ ```
119
+
120
+ All test functions are parameterized to run against all three providers with their respective models.
@@ -0,0 +1,43 @@
1
+ # CURSOR.md
2
+
3
+ 统一LLM接口客户端
4
+
5
+ ## 技术架构
6
+ - 语言:Python 3.9+
7
+ - 构建:hatchling(PEP 517/518)
8
+ - 环境管理:uv(推荐)
9
+ - 包布局:`src/` 结构
10
+
11
+ ## 项目结构
12
+ - `src/onellmclient/`:核心包
13
+ - `tests/`:单元测试
14
+
15
+ ## 开发约定
16
+ - 仅最小化实现,按需引入依赖(extras)。
17
+ - 单元测试仅覆盖改动部分。
18
+ - 禁止改动 `.env`。
19
+
20
+ ## 依赖管理策略
21
+ - 使用版本范围限制:`>=x.y.z,<X+1.0.0`(下一个大版本)格式
22
+ - 防止破坏性大版本更新,允许小版本与补丁更新
23
+ - 保持构建可重现性
24
+
25
+ ## 开发环境安装
26
+ ```bash
27
+ # 安装基础包(开发模式)
28
+ uv pip install -e .
29
+
30
+ # 安装测试依赖(含 pytest 与各家 SDK)
31
+ uv pip install -e .[test]
32
+
33
+ # 安装所有 LLM 提供商依赖
34
+ uv pip install -e .[all]
35
+
36
+ # 安装特定 LLM 提供商依赖
37
+ uv pip install -e .[openai]
38
+ ```
39
+
40
+ ## TODO 记录
41
+ - 初始化包与发布配置
42
+ - 实现统一 Client(OpenAI/Anthropic/Gemini)
43
+ - 增加 provider 适配层与最小集成测试
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2025
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
@@ -0,0 +1,115 @@
1
+ Metadata-Version: 2.4
2
+ Name: onellmclient
3
+ Version: 0.1.0
4
+ Summary: A unified Python client to normalize interfaces across major LLM providers (OpenAI, Anthropic, Gemini).
5
+ Author-email: Your Name <you@example.com>
6
+ License: MIT License
7
+
8
+ Copyright (c) 2025
9
+
10
+ Permission is hereby granted, free of charge, to any person obtaining a copy
11
+ of this software and associated documentation files (the "Software"), to deal
12
+ in the Software without restriction, including without limitation the rights
13
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
14
+ copies of the Software, and to permit persons to whom the Software is
15
+ furnished to do so, subject to the following conditions:
16
+
17
+ The above copyright notice and this permission notice shall be included in all
18
+ copies or substantial portions of the Software.
19
+
20
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
21
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
22
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
23
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
24
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
25
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
26
+ SOFTWARE.
27
+ License-File: LICENSE
28
+ Keywords: anthropic,client,gemini,llm,openai,sdk
29
+ Classifier: License :: OSI Approved :: MIT License
30
+ Classifier: Operating System :: OS Independent
31
+ Classifier: Programming Language :: Python :: 3
32
+ Classifier: Programming Language :: Python :: 3 :: Only
33
+ Classifier: Programming Language :: Python :: 3.9
34
+ Classifier: Programming Language :: Python :: 3.10
35
+ Classifier: Programming Language :: Python :: 3.11
36
+ Classifier: Programming Language :: Python :: 3.12
37
+ Requires-Python: >=3.9
38
+ Provides-Extra: all
39
+ Requires-Dist: anthropic<1.0.0,>=0.68.0; extra == 'all'
40
+ Requires-Dist: google-genai<2.0.0,>=1.38.0; extra == 'all'
41
+ Requires-Dist: openai<2.0.0,>=1.109.1; extra == 'all'
42
+ Provides-Extra: anthropic
43
+ Requires-Dist: anthropic<1.0.0,>=0.68.0; extra == 'anthropic'
44
+ Provides-Extra: gemini
45
+ Requires-Dist: google-genai<2.0.0,>=1.38.0; extra == 'gemini'
46
+ Provides-Extra: openai
47
+ Requires-Dist: openai<2.0.0,>=1.109.1; extra == 'openai'
48
+ Provides-Extra: test
49
+ Requires-Dist: anthropic<1.0.0,>=0.68.0; extra == 'test'
50
+ Requires-Dist: google-genai<2.0.0,>=1.38.0; extra == 'test'
51
+ Requires-Dist: openai<2.0.0,>=1.109.1; extra == 'test'
52
+ Requires-Dist: pytest<9.0.0,>=8.4.2; extra == 'test'
53
+ Requires-Dist: python-dotenv<2.0,>=1.1.1; extra == 'test'
54
+ Description-Content-Type: text/markdown
55
+
56
+ # onellmclient
57
+
58
+ 统一主要 LLM 提供商(OpenAI、Anthropic、Gemini)接口格式的 Python 客户端。
59
+
60
+ - 运行时零强制依赖;通过 extras 按需安装各家 SDK。
61
+ - 提供统一的 `Client` 接口以便上层应用透明切换供应商。
62
+
63
+ ## 安装
64
+
65
+ 使用 uv(推荐):
66
+
67
+ ```bash
68
+ uv venv
69
+ uv pip install -e .
70
+ # 或安装某家 SDK:
71
+ uv pip install -e .[openai]
72
+ uv pip install -e .[anthropic]
73
+ uv pip install -e .[gemini]
74
+ uv pip install -e .[all]
75
+ ```
76
+
77
+ ## 构建与发布
78
+
79
+ 使用 uv 构建 sdist 与 wheel:
80
+
81
+ ```bash
82
+ uv build
83
+ ls dist/
84
+ ```
85
+
86
+ 发布到 TestPyPI:
87
+
88
+ ```bash
89
+ uv tool install twine # 首次需要安装
90
+ uv run twine upload --repository testpypi dist/*
91
+ # 安装测试:
92
+ uv pip install -i https://test.pypi.org/simple/ onellmclient==0.1.0
93
+ ```
94
+
95
+ 发布到 PyPI(确认版本号已递增且能在 TestPyPI 正常安装后):
96
+
97
+ ```bash
98
+ uv run twine upload dist/*
99
+ ```
100
+
101
+ ## 使用
102
+
103
+ ```python
104
+ from onellmclient import Client
105
+
106
+ client = Client(openai={"api_key": "..."})
107
+ resp = client.completion(
108
+ provider="openai", model="gpt-4o-mini", messages=[{"role":"user","content":"hi"}]
109
+ )
110
+ print(resp)
111
+ ```
112
+
113
+ ## 开源协议
114
+
115
+ MIT
@@ -0,0 +1,60 @@
1
+ # onellmclient
2
+
3
+ 统一主要 LLM 提供商(OpenAI、Anthropic、Gemini)接口格式的 Python 客户端。
4
+
5
+ - 运行时零强制依赖;通过 extras 按需安装各家 SDK。
6
+ - 提供统一的 `Client` 接口以便上层应用透明切换供应商。
7
+
8
+ ## 安装
9
+
10
+ 使用 uv(推荐):
11
+
12
+ ```bash
13
+ uv venv
14
+ uv pip install -e .
15
+ # 或安装某家 SDK:
16
+ uv pip install -e .[openai]
17
+ uv pip install -e .[anthropic]
18
+ uv pip install -e .[gemini]
19
+ uv pip install -e .[all]
20
+ ```
21
+
22
+ ## 构建与发布
23
+
24
+ 使用 uv 构建 sdist 与 wheel:
25
+
26
+ ```bash
27
+ uv build
28
+ ls dist/
29
+ ```
30
+
31
+ 发布到 TestPyPI:
32
+
33
+ ```bash
34
+ uv tool install twine # 首次需要安装
35
+ uv run twine upload --repository testpypi dist/*
36
+ # 安装测试:
37
+ uv pip install -i https://test.pypi.org/simple/ onellmclient==0.1.0
38
+ ```
39
+
40
+ 发布到 PyPI(确认版本号已递增且能在 TestPyPI 正常安装后):
41
+
42
+ ```bash
43
+ uv run twine upload dist/*
44
+ ```
45
+
46
+ ## 使用
47
+
48
+ ```python
49
+ from onellmclient import Client
50
+
51
+ client = Client(openai={"api_key": "..."})
52
+ resp = client.completion(
53
+ provider="openai", model="gpt-4o-mini", messages=[{"role":"user","content":"hi"}]
54
+ )
55
+ print(resp)
56
+ ```
57
+
58
+ ## 开源协议
59
+
60
+ MIT
@@ -0,0 +1,61 @@
1
+ [build-system]
2
+ requires = ["hatchling>=1.24.2"]
3
+ build-backend = "hatchling.build"
4
+
5
+ [project]
6
+ name = "onellmclient"
7
+ version = "0.1.0"
8
+ description = "A unified Python client to normalize interfaces across major LLM providers (OpenAI, Anthropic, Gemini)."
9
+ authors = [{ name = "Your Name", email = "you@example.com" }] # TODO: replace placeholder author info before publishing
10
+ license = { file = "LICENSE" }
11
+ readme = "README.md"
12
+ requires-python = ">=3.9"
13
+ keywords = ["llm", "openai", "anthropic", "gemini", "client", "sdk"]
14
+ classifiers = [
15
+ "Programming Language :: Python :: 3",
16
+ "Programming Language :: Python :: 3 :: Only",
17
+ "Programming Language :: Python :: 3.9",
18
+ "Programming Language :: Python :: 3.10",
19
+ "Programming Language :: Python :: 3.11",
20
+ "Programming Language :: Python :: 3.12",
21
+ "License :: OSI Approved :: MIT License",
22
+ "Operating System :: OS Independent",
23
+ ]
24
+
25
+ # 运行时硬依赖尽量为空;第三方SDK由用户按需安装(extras)
26
+ dependencies = []
27
+
28
+ [project.optional-dependencies]
29
+ openai = ["openai>=1.109.1,<2.0.0"]
30
+ anthropic = ["anthropic>=0.68.0,<1.0.0"]
31
+ gemini = ["google-genai>=1.38.0,<2.0.0"]
32
+ all = [
33
+ "openai>=1.109.1,<2.0.0",
34
+ "anthropic>=0.68.0,<1.0.0",
35
+ "google-genai>=1.38.0,<2.0.0",
36
+ ]
37
+
38
+ # 开发测试依赖
39
+ test = [
40
+ "openai>=1.109.1,<2.0.0",
41
+ "anthropic>=0.68.0,<1.0.0",
42
+ "google-genai>=1.38.0,<2.0.0",
43
+ "pytest>=8.4.2,<9.0.0",
44
+ "python-dotenv>=1.1.1,<2.0.0",
45
+ ]
46
+
47
+ [project.urls]
48
+
49
+
50
+ [tool.hatch.build.targets.wheel]
51
+ packages = ["src/onellmclient"]
52
+
53
+ [tool.hatch.version] # NOTE: likely inert — hatchling only reads this when "version" is listed in project.dynamic; version is set statically above
54
+ path = "src/onellmclient/__init__.py"
55
+
56
+ [tool.ruff]
57
+ line-length = 100
58
+
59
+ [tool.pytest.ini_options]
60
+ minversion = "7.0"
61
+ pythonpath = ["src"]
@@ -0,0 +1,6 @@
1
+ """Unified LLM Client package."""
2
+
3
+ from .client import Client
4
+
5
+ __all__ = ["Client"]
6
+ __version__ = "0.1.0"