local-openai2anthropic 0.3.3__tar.gz → 0.3.4__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (33)
  1. {local_openai2anthropic-0.3.3 → local_openai2anthropic-0.3.4}/PKG-INFO +1 -1
  2. {local_openai2anthropic-0.3.3 → local_openai2anthropic-0.3.4}/pyproject.toml +1 -1
  3. {local_openai2anthropic-0.3.3 → local_openai2anthropic-0.3.4}/src/local_openai2anthropic/__init__.py +1 -1
  4. {local_openai2anthropic-0.3.3 → local_openai2anthropic-0.3.4}/src/local_openai2anthropic/converter.py +2 -0
  5. {local_openai2anthropic-0.3.3 → local_openai2anthropic-0.3.4}/src/local_openai2anthropic/router.py +18 -3
  6. {local_openai2anthropic-0.3.3 → local_openai2anthropic-0.3.4}/.github/workflows/publish.yml +0 -0
  7. {local_openai2anthropic-0.3.3 → local_openai2anthropic-0.3.4}/.gitignore +0 -0
  8. {local_openai2anthropic-0.3.3 → local_openai2anthropic-0.3.4}/LICENSE +0 -0
  9. {local_openai2anthropic-0.3.3 → local_openai2anthropic-0.3.4}/README.md +0 -0
  10. {local_openai2anthropic-0.3.3 → local_openai2anthropic-0.3.4}/README_zh.md +0 -0
  11. {local_openai2anthropic-0.3.3 → local_openai2anthropic-0.3.4}/examples/basic_chat.py +0 -0
  12. {local_openai2anthropic-0.3.3 → local_openai2anthropic-0.3.4}/examples/streaming.py +0 -0
  13. {local_openai2anthropic-0.3.3 → local_openai2anthropic-0.3.4}/examples/thinking_mode.py +0 -0
  14. {local_openai2anthropic-0.3.3 → local_openai2anthropic-0.3.4}/examples/tool_calling.py +0 -0
  15. {local_openai2anthropic-0.3.3 → local_openai2anthropic-0.3.4}/examples/vision.py +0 -0
  16. {local_openai2anthropic-0.3.3 → local_openai2anthropic-0.3.4}/examples/web_search.py +0 -0
  17. {local_openai2anthropic-0.3.3 → local_openai2anthropic-0.3.4}/src/local_openai2anthropic/__main__.py +0 -0
  18. {local_openai2anthropic-0.3.3 → local_openai2anthropic-0.3.4}/src/local_openai2anthropic/config.py +0 -0
  19. {local_openai2anthropic-0.3.3 → local_openai2anthropic-0.3.4}/src/local_openai2anthropic/daemon.py +0 -0
  20. {local_openai2anthropic-0.3.3 → local_openai2anthropic-0.3.4}/src/local_openai2anthropic/daemon_runner.py +0 -0
  21. {local_openai2anthropic-0.3.3 → local_openai2anthropic-0.3.4}/src/local_openai2anthropic/main.py +0 -0
  22. {local_openai2anthropic-0.3.3 → local_openai2anthropic-0.3.4}/src/local_openai2anthropic/openai_types.py +0 -0
  23. {local_openai2anthropic-0.3.3 → local_openai2anthropic-0.3.4}/src/local_openai2anthropic/protocol.py +0 -0
  24. {local_openai2anthropic-0.3.3 → local_openai2anthropic-0.3.4}/src/local_openai2anthropic/server_tools/__init__.py +0 -0
  25. {local_openai2anthropic-0.3.3 → local_openai2anthropic-0.3.4}/src/local_openai2anthropic/server_tools/base.py +0 -0
  26. {local_openai2anthropic-0.3.3 → local_openai2anthropic-0.3.4}/src/local_openai2anthropic/server_tools/web_search.py +0 -0
  27. {local_openai2anthropic-0.3.3 → local_openai2anthropic-0.3.4}/src/local_openai2anthropic/tavily_client.py +0 -0
  28. {local_openai2anthropic-0.3.3 → local_openai2anthropic-0.3.4}/tests/__init__.py +0 -0
  29. {local_openai2anthropic-0.3.3 → local_openai2anthropic-0.3.4}/tests/test_converter.py +0 -0
  30. {local_openai2anthropic-0.3.3 → local_openai2anthropic-0.3.4}/tests/test_integration.py +0 -0
  31. {local_openai2anthropic-0.3.3 → local_openai2anthropic-0.3.4}/tests/test_router.py +0 -0
  32. {local_openai2anthropic-0.3.3 → local_openai2anthropic-0.3.4}/tests/test_upstream.sh +0 -0
  33. {local_openai2anthropic-0.3.3 → local_openai2anthropic-0.3.4}/uv.lock +0 -0
PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: local-openai2anthropic
-Version: 0.3.3
+Version: 0.3.4
 Summary: A lightweight proxy server that converts Anthropic Messages API to OpenAI API
 Project-URL: Homepage, https://github.com/dongfangzan/local-openai2anthropic
 Project-URL: Repository, https://github.com/dongfangzan/local-openai2anthropic
pyproject.toml
@@ -1,6 +1,6 @@
 [project]
 name = "local-openai2anthropic"
-version = "0.3.3"
+version = "0.3.4"
 description = "A lightweight proxy server that converts Anthropic Messages API to OpenAI API"
 readme = "README.md"
 license = { text = "Apache-2.0" }
src/local_openai2anthropic/__init__.py
@@ -3,7 +3,7 @@
 local-openai2anthropic: A proxy server that converts Anthropic Messages API to OpenAI API.
 """
 
-__version__ = "0.2.5"
+__version__ = "0.3.4"
 
 from local_openai2anthropic.protocol import (
     AnthropicError,
src/local_openai2anthropic/converter.py
@@ -54,6 +54,7 @@ def convert_anthropic_to_openai(
     tools = anthropic_params.get("tools")
     top_k = anthropic_params.get("top_k")
     top_p = anthropic_params.get("top_p", 0.95)
+    repetition_penalty = anthropic_params.get("repetition_penalty", 1.1)
     thinking = anthropic_params.get("thinking")
     # metadata is accepted but not forwarded to OpenAI
 
@@ -102,6 +103,7 @@ def convert_anthropic_to_openai(
         "messages": openai_messages,
         "max_tokens": max_tokens,
         "stream": stream,
+        "repetition_penalty": repetition_penalty,
     }
 
     # Always include usage in stream for accurate token counting
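The two converter.py hunks plumb a single new knob through the request translation: repetition_penalty is read from the incoming Anthropic-style parameters with a default of 1.1 and copied into the outgoing OpenAI-style payload. repetition_penalty is not part of the official OpenAI Chat Completions schema, but OpenAI-compatible local backends (vLLM, for example) commonly accept it as an extra sampling parameter. Below is a minimal sketch of the plumbing, separate from the package's actual convert_anthropic_to_openai(); the helper name and model string are illustrative only.

# Sketch only: mirrors the diff's parameter plumbing, not the package's real code.
# The default of 1.1 matches the diff.
def sketch_convert(anthropic_params: dict) -> dict:
    repetition_penalty = anthropic_params.get("repetition_penalty", 1.1)
    return {
        "model": anthropic_params.get("model"),
        "max_tokens": anthropic_params.get("max_tokens"),
        "stream": anthropic_params.get("stream", False),
        "repetition_penalty": repetition_penalty,
    }

# Caller did not set repetition_penalty, so the 1.1 default is applied.
print(sketch_convert({"model": "local-model", "max_tokens": 256}))
# {'model': 'local-model', 'max_tokens': 256, 'stream': False, 'repetition_penalty': 1.1}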
src/local_openai2anthropic/router.py
@@ -179,6 +179,7 @@ async def _stream_response(
     output_tokens = 0
     message_id = None
     sent_message_delta = False
+    pending_text_prefix = ""
 
     async for line in response.aiter_lines():
         if not line.startswith("data: "):
@@ -299,6 +300,7 @@ async def _stream_response(
         # Handle reasoning content (thinking)
         if delta.get("reasoning_content"):
             reasoning = delta["reasoning_content"]
+            pending_text_prefix = ""
             # Start thinking content block if not already started
             if not content_block_started or current_block_type != "thinking":
                 # Close previous block if exists
@@ -338,8 +340,16 @@ async def _stream_response(
             continue
 
         # Handle content
-        if delta.get("content"):
+        if isinstance(delta.get("content"), str):
+            content_text = delta.get("content", "")
+            if not content_text:
+                continue
+            if content_text.strip() == "(no content)":
+                continue
             if not content_block_started or current_block_type != "text":
+                if not content_text.strip():
+                    pending_text_prefix += content_text
+                    continue
                 # Close previous block if exists
                 if content_block_started:
                     stop_block = {
@@ -363,16 +373,21 @@ async def _stream_response(
                 content_block_started = True
                 current_block_type = "text"
 
-            output_tokens += _count_tokens(delta["content"])
+            if pending_text_prefix:
+                content_text = pending_text_prefix + content_text
+                pending_text_prefix = ""
+
+            output_tokens += _count_tokens(content_text)
             delta_block = {
                 "type": "content_block_delta",
                 "index": content_block_index,
-                "delta": {"type": "text_delta", "text": delta["content"]},
+                "delta": {"type": "text_delta", "text": content_text},
             }
             yield f"event: content_block_delta\ndata: {json.dumps(delta_block)}\n\n"
 
         # Handle tool calls
         if delta.get("tool_calls"):
+            pending_text_prefix = ""
             for tool_call in delta["tool_calls"]:
                 tool_call_idx = tool_call.get("index", 0)
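Taken together, the router.py hunks tighten the streaming text path: content is handled only when it is a string, empty chunks and the literal "(no content)" placeholder are dropped, and whitespace-only chunks that arrive before a text block has opened are buffered in pending_text_prefix and prepended to the first real text delta (the buffer is reset whenever reasoning or tool-call deltas show up). The sketch below re-implements just that buffering rule outside the SSE plumbing; the function name and chunk list are illustrative, not the package's API.

# Illustrative re-implementation of the whitespace-buffering rule from the diff,
# decoupled from the event-stream handling.
def coalesce_text_chunks(chunks):
    pending_prefix = ""      # whitespace seen before any real text
    block_open = False       # has a text block been started?
    out = []
    for chunk in chunks:
        if not isinstance(chunk, str) or not chunk:
            continue
        if chunk.strip() == "(no content)":
            continue                       # placeholder chunk, drop it
        if not block_open:
            if not chunk.strip():
                pending_prefix += chunk    # defer leading whitespace
                continue
            block_open = True              # first real text opens the block
        if pending_prefix:
            chunk = pending_prefix + chunk
            pending_prefix = ""
        out.append(chunk)
    return out

print(coalesce_text_chunks(["\n", "  ", "Hello", " world", "(no content)"]))
# ['\n  Hello', ' world']

Deferring leading whitespace this way avoids opening a text content block for a delta that contains nothing visible, while still preserving the whitespace once real text arrives.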