local-openai2anthropic 0.2.4__tar.gz → 0.2.5__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {local_openai2anthropic-0.2.4 → local_openai2anthropic-0.2.5}/PKG-INFO +2 -1
- {local_openai2anthropic-0.2.4 → local_openai2anthropic-0.2.5}/README.md +1 -0
- {local_openai2anthropic-0.2.4 → local_openai2anthropic-0.2.5}/README_zh.md +1 -0
- {local_openai2anthropic-0.2.4 → local_openai2anthropic-0.2.5}/pyproject.toml +1 -1
- {local_openai2anthropic-0.2.4 → local_openai2anthropic-0.2.5}/src/local_openai2anthropic/__init__.py +1 -1
- {local_openai2anthropic-0.2.4 → local_openai2anthropic-0.2.5}/src/local_openai2anthropic/converter.py +2 -2
- {local_openai2anthropic-0.2.4 → local_openai2anthropic-0.2.5}/src/local_openai2anthropic/router.py +2 -2
- {local_openai2anthropic-0.2.4 → local_openai2anthropic-0.2.5}/.github/workflows/publish.yml +0 -0
- {local_openai2anthropic-0.2.4 → local_openai2anthropic-0.2.5}/.gitignore +0 -0
- {local_openai2anthropic-0.2.4 → local_openai2anthropic-0.2.5}/LICENSE +0 -0
- {local_openai2anthropic-0.2.4 → local_openai2anthropic-0.2.5}/examples/basic_chat.py +0 -0
- {local_openai2anthropic-0.2.4 → local_openai2anthropic-0.2.5}/examples/streaming.py +0 -0
- {local_openai2anthropic-0.2.4 → local_openai2anthropic-0.2.5}/examples/thinking_mode.py +0 -0
- {local_openai2anthropic-0.2.4 → local_openai2anthropic-0.2.5}/examples/tool_calling.py +0 -0
- {local_openai2anthropic-0.2.4 → local_openai2anthropic-0.2.5}/examples/vision.py +0 -0
- {local_openai2anthropic-0.2.4 → local_openai2anthropic-0.2.5}/examples/web_search.py +0 -0
- {local_openai2anthropic-0.2.4 → local_openai2anthropic-0.2.5}/src/local_openai2anthropic/__main__.py +0 -0
- {local_openai2anthropic-0.2.4 → local_openai2anthropic-0.2.5}/src/local_openai2anthropic/config.py +0 -0
- {local_openai2anthropic-0.2.4 → local_openai2anthropic-0.2.5}/src/local_openai2anthropic/daemon.py +0 -0
- {local_openai2anthropic-0.2.4 → local_openai2anthropic-0.2.5}/src/local_openai2anthropic/daemon_runner.py +0 -0
- {local_openai2anthropic-0.2.4 → local_openai2anthropic-0.2.5}/src/local_openai2anthropic/main.py +0 -0
- {local_openai2anthropic-0.2.4 → local_openai2anthropic-0.2.5}/src/local_openai2anthropic/openai_types.py +0 -0
- {local_openai2anthropic-0.2.4 → local_openai2anthropic-0.2.5}/src/local_openai2anthropic/protocol.py +0 -0
- {local_openai2anthropic-0.2.4 → local_openai2anthropic-0.2.5}/src/local_openai2anthropic/server_tools/__init__.py +0 -0
- {local_openai2anthropic-0.2.4 → local_openai2anthropic-0.2.5}/src/local_openai2anthropic/server_tools/base.py +0 -0
- {local_openai2anthropic-0.2.4 → local_openai2anthropic-0.2.5}/src/local_openai2anthropic/server_tools/web_search.py +0 -0
- {local_openai2anthropic-0.2.4 → local_openai2anthropic-0.2.5}/src/local_openai2anthropic/tavily_client.py +0 -0
- {local_openai2anthropic-0.2.4 → local_openai2anthropic-0.2.5}/tests/__init__.py +0 -0
- {local_openai2anthropic-0.2.4 → local_openai2anthropic-0.2.5}/tests/test_converter.py +0 -0
- {local_openai2anthropic-0.2.4 → local_openai2anthropic-0.2.5}/tests/test_integration.py +0 -0
- {local_openai2anthropic-0.2.4 → local_openai2anthropic-0.2.5}/tests/test_router.py +0 -0
- {local_openai2anthropic-0.2.4 → local_openai2anthropic-0.2.5}/tests/test_upstream.sh +0 -0
- {local_openai2anthropic-0.2.4 → local_openai2anthropic-0.2.5}/uv.lock +0 -0
{local_openai2anthropic-0.2.4 → local_openai2anthropic-0.2.5}/PKG-INFO
RENAMED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: local-openai2anthropic
-Version: 0.2.4
+Version: 0.2.5
 Summary: A lightweight proxy server that converts Anthropic Messages API to OpenAI API
 Project-URL: Homepage, https://github.com/dongfangzan/local-openai2anthropic
 Project-URL: Repository, https://github.com/dongfangzan/local-openai2anthropic
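The metadata change is just the version bump; the Summary line above is a reminder of what the package does: it lets the official Anthropic SDK talk to any OpenAI-compatible backend through a local proxy. A minimal, hypothetical usage sketch — the base URL, port, and model name are assumptions about a local setup, not values taken from this diff:

```python
import anthropic

# Hypothetical: point the official Anthropic SDK at a running local-openai2anthropic proxy.
# The base_url/port, api_key handling, and model name all depend on your local configuration.
client = anthropic.Anthropic(
    base_url="http://localhost:8000",   # wherever the proxy is listening (assumed)
    api_key="not-used-for-local",       # the proxy forwards requests to your local backend
)

message = client.messages.create(
    model="qwen2.5-7b-instruct",        # whatever your OpenAI-compatible server exposes (assumed)
    max_tokens=256,
    messages=[{"role": "user", "content": "Say hello"}],
)
print(message.content[0].text)
```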
{local_openai2anthropic-0.2.4 → local_openai2anthropic-0.2.5}/README.md
RENAMED

@@ -55,6 +55,7 @@ This proxy translates Claude SDK calls to OpenAI API format in real-time, enabli
 - **Offline development** without cloud API costs
 - **Privacy-first AI** - data never leaves your machine
 - **Seamless model switching** between cloud and local
+- **Web Search tool** - built-in Tavily web search for local models
 
 ---
 
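The new README bullet is the only content change here; it advertises the built-in web search server tool (the file list above shows the supporting `server_tools/web_search.py` and `tavily_client.py` modules, both unchanged in this release, plus an `examples/web_search.py`). A hedged sketch of how a client might request that tool through the Anthropic-style Messages API — the exact tool block the proxy accepts and the Tavily API key configuration are assumptions, not something this diff shows:

```python
import anthropic

# Assumes the proxy is already running locally with a Tavily API key configured on its side.
client = anthropic.Anthropic(base_url="http://localhost:8000", api_key="unused")

message = client.messages.create(
    model="qwen2.5-7b-instruct",  # your local OpenAI-compatible model (assumed)
    max_tokens=512,
    # Anthropic-style server tool block; the exact type string the proxy expects is an assumption.
    tools=[{"type": "web_search_20250305", "name": "web_search"}],
    messages=[{"role": "user", "content": "What is the latest stable Python release?"}],
)
print(message.content)
```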
{local_openai2anthropic-0.2.4 → local_openai2anthropic-0.2.5}/README_zh.md
RENAMED

@@ -20,6 +20,7 @@ This proxy translates Claude SDK calls to OpenAI API format in real-time, enabli
 - **Offline development** without cloud API costs
 - **Privacy-first AI** - data never leaves your machine
 - **Seamless model switching** between cloud and local
+- **Web Search tool** - built-in Tavily web search for local models
 
 ---
 
{local_openai2anthropic-0.2.4 → local_openai2anthropic-0.2.5}/src/local_openai2anthropic/converter.py
RENAMED

@@ -49,11 +49,11 @@ def convert_anthropic_to_openai(
     system = anthropic_params.get("system")
     stop_sequences = anthropic_params.get("stop_sequences")
     stream = anthropic_params.get("stream", False)
-    temperature = anthropic_params.get("temperature")
+    temperature = anthropic_params.get("temperature", 0.6)
     tool_choice = anthropic_params.get("tool_choice")
     tools = anthropic_params.get("tools")
     top_k = anthropic_params.get("top_k")
-    top_p = anthropic_params.get("top_p")
+    top_p = anthropic_params.get("top_p", 0.95)
     thinking = anthropic_params.get("thinking")
     # metadata is accepted but not forwarded to OpenAI
 
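The only behavioral change in this hunk is that missing sampling parameters now fall back to fixed defaults (temperature 0.6, top_p 0.95) instead of staying `None`. A standalone sketch of the new fallback — only the two `.get(...)` defaults come from the diff; the request payload is a made-up example:

```python
# Hypothetical Anthropic-style request with no sampling parameters supplied by the caller.
anthropic_params = {
    "model": "claude-sonnet-4",
    "max_tokens": 1024,
    "messages": [{"role": "user", "content": "Hello"}],
}

# 0.2.4 behavior: .get("temperature") / .get("top_p") returned None when the keys were absent.
# 0.2.5 behavior (per the diff above): fixed defaults are substituted instead.
temperature = anthropic_params.get("temperature", 0.6)
top_p = anthropic_params.get("top_p", 0.95)

print(temperature, top_p)  # -> 0.6 0.95
```

The practical effect is that requests which previously forwarded no sampling parameters to the OpenAI backend now always carry these values unless the caller overrides them.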
{local_openai2anthropic-0.2.4 → local_openai2anthropic-0.2.5}/src/local_openai2anthropic/router.py
RENAMED

@@ -406,7 +406,7 @@ async def _handle_with_server_tools(
     async with httpx.AsyncClient(timeout=settings.request_timeout) as client:
         try:
             # Log full request for debugging
-            logger.
+            logger.debug(f"Request body: {json.dumps(params, indent=2, default=str)[:3000]}")
 
             response = await client.post(url, headers=headers, json=params)
 
@@ -421,7 +421,7 @@ async def _handle_with_server_tools(
             )
 
             completion_data = response.json()
-            logger.
+            logger.debug(f"OpenAI response: {json.dumps(completion_data, indent=2)[:500]}...")
             from openai.types.chat import ChatCompletion
             completion = ChatCompletion.model_validate(completion_data)
 
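The two changed lines (their old versions are cut off in this diff) become `logger.debug` calls that serialize the payload and slice the string so a single log line stays bounded: 3,000 characters for the outgoing request, 500 for the upstream response. A self-contained sketch of that pattern, with an assumed logger name and made-up payloads:

```python
import json
import logging

logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger("local_openai2anthropic.router")  # logger name is an assumption

# Made-up payloads standing in for the proxy's real request and upstream response dicts.
params = {"model": "qwen2.5-7b-instruct", "messages": [{"role": "user", "content": "hi"}]}
completion_data = {"id": "chatcmpl-123", "object": "chat.completion", "choices": []}

# Pretty-print the outgoing request body and cap it at 3000 characters.
logger.debug(f"Request body: {json.dumps(params, indent=2, default=str)[:3000]}")

# Cap the logged upstream response at 500 characters.
logger.debug(f"OpenAI response: {json.dumps(completion_data, indent=2)[:500]}...")
```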
All remaining files (listed above with +0 -0) were renamed to the new version prefix without content changes.